diff --git a/cpp/src/gandiva/function_registry_common.h b/cpp/src/gandiva/function_registry_common.h index abe861e3385e..f0b6f0569dfd 100644 --- a/cpp/src/gandiva/function_registry_common.h +++ b/cpp/src/gandiva/function_registry_common.h @@ -55,6 +55,13 @@ inline DataTypePtr time32() { return arrow::time32(arrow::TimeUnit::MILLI); } inline DataTypePtr time64() { return arrow::time64(arrow::TimeUnit::MICRO); } inline DataTypePtr timestamp() { return arrow::timestamp(arrow::TimeUnit::MILLI); } + +// Precision-specific timestamp types for explicit time unit handling +inline DataTypePtr timestamp_sec() { return arrow::timestamp(arrow::TimeUnit::SECOND); } +inline DataTypePtr timestamp_ms() { return arrow::timestamp(arrow::TimeUnit::MILLI); } +inline DataTypePtr timestamp_us() { return arrow::timestamp(arrow::TimeUnit::MICRO); } +inline DataTypePtr timestamp_ns() { return arrow::timestamp(arrow::TimeUnit::NANO); } + inline DataTypePtr decimal128() { return arrow::decimal128(38, 0); } struct KeyHash { @@ -289,6 +296,14 @@ typedef std::unordered_map ALIASES, DataTypeVector{TYPE()}, \ + int64(), kResultNullIfNull, ARROW_STRINGIFY(NAME##_##TYPE)) + +// Precision-aware date truncation function for timestamp types +// Returns timestamp in the same precision as input +#define DATE_TRUNC_TIMESTAMP_PRECISION(NAME, ALIASES, TYPE) \ + NativeFunction(#NAME, std::vector ALIASES, DataTypeVector{TYPE()}, \ + TYPE(), kResultNullIfNull, ARROW_STRINGIFY(NAME##_##TYPE)) + std::vector GetDateTimeFunctionRegistry() { static std::vector date_time_fn_registry_ = { UNARY_SAFE_NULL_NEVER_BOOL(isnull, {}, day_time_interval), @@ -62,6 +74,119 @@ std::vector GetDateTimeFunctionRegistry() { NEXT_DAY_FNS(next_day), + // Precision-specific extract functions for all timestamp time units + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractYear, {"extract_year"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractMonth, {"extract_month"}), + 
TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractDay, {"extract_day"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractHour, {"extract_hour"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractMinute, {"extract_minute"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractSecond, {"extract_second"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractDoy, {"extract_doy"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractDow, {"extract_dow"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractWeek, {"extract_week"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractQuarter, {"extract_quarter"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractEpoch, {"extract_epoch"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractMillennium, {"extract_millennium"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractCentury, {"extract_century"}), + TIMESTAMP_PRECISION_TYPES(EXTRACT_TIMESTAMP_PRECISION, extractDecade, {"extract_decade"}), + + // Precision-specific date_trunc functions for all timestamp time units + TIMESTAMP_PRECISION_TYPES(DATE_TRUNC_TIMESTAMP_PRECISION, date_trunc_Second, {}), + TIMESTAMP_PRECISION_TYPES(DATE_TRUNC_TIMESTAMP_PRECISION, date_trunc_Minute, {}), + TIMESTAMP_PRECISION_TYPES(DATE_TRUNC_TIMESTAMP_PRECISION, date_trunc_Hour, {}), + TIMESTAMP_PRECISION_TYPES(DATE_TRUNC_TIMESTAMP_PRECISION, date_trunc_Day, {}), + TIMESTAMP_PRECISION_TYPES(DATE_TRUNC_TIMESTAMP_PRECISION, date_trunc_Week, {}), + TIMESTAMP_PRECISION_TYPES(DATE_TRUNC_TIMESTAMP_PRECISION, date_trunc_Month, {}), + TIMESTAMP_PRECISION_TYPES(DATE_TRUNC_TIMESTAMP_PRECISION, date_trunc_Quarter, {}), + TIMESTAMP_PRECISION_TYPES(DATE_TRUNC_TIMESTAMP_PRECISION, date_trunc_Year, {}), + TIMESTAMP_PRECISION_TYPES(DATE_TRUNC_TIMESTAMP_PRECISION, date_trunc_Decade, {}), + TIMESTAMP_PRECISION_TYPES(DATE_TRUNC_TIMESTAMP_PRECISION, 
date_trunc_Century, {}), + TIMESTAMP_PRECISION_TYPES(DATE_TRUNC_TIMESTAMP_PRECISION, date_trunc_Millennium, {}), + + // Sub-millisecond truncation (only for higher precision types) + NativeFunction("date_trunc_Millisecond", {}, DataTypeVector{timestamp_us()}, + timestamp_us(), kResultNullIfNull, "date_trunc_Millisecond_timestamp_us"), + NativeFunction("date_trunc_Millisecond", {}, DataTypeVector{timestamp_ns()}, + timestamp_ns(), kResultNullIfNull, "date_trunc_Millisecond_timestamp_ns"), + NativeFunction("date_trunc_Microsecond", {}, DataTypeVector{timestamp_ns()}, + timestamp_ns(), kResultNullIfNull, "date_trunc_Microsecond_timestamp_ns"), + + // Precision-specific cast between timestamp types + NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_sec()}, + timestamp_ms(), kResultNullIfNull, "castTIMESTAMP_ms_timestamp_sec"), + NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_sec()}, + timestamp_us(), kResultNullIfNull, "castTIMESTAMP_us_timestamp_sec"), + NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_sec()}, + timestamp_ns(), kResultNullIfNull, "castTIMESTAMP_ns_timestamp_sec"), + NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_ms()}, + timestamp_sec(), kResultNullIfNull, "castTIMESTAMP_sec_timestamp_ms"), + NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_ms()}, + timestamp_us(), kResultNullIfNull, "castTIMESTAMP_us_timestamp_ms"), + NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_ms()}, + timestamp_ns(), kResultNullIfNull, "castTIMESTAMP_ns_timestamp_ms"), + NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_us()}, + timestamp_sec(), kResultNullIfNull, "castTIMESTAMP_sec_timestamp_us"), + NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_us()}, + timestamp_ms(), kResultNullIfNull, "castTIMESTAMP_ms_timestamp_us"), + NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_us()}, + timestamp_ns(), kResultNullIfNull, "castTIMESTAMP_ns_timestamp_us"), + 
NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_ns()}, + timestamp_sec(), kResultNullIfNull, "castTIMESTAMP_sec_timestamp_ns"), + NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_ns()}, + timestamp_ms(), kResultNullIfNull, "castTIMESTAMP_ms_timestamp_ns"), + NativeFunction("castTIMESTAMP", {}, DataTypeVector{timestamp_ns()}, + timestamp_us(), kResultNullIfNull, "castTIMESTAMP_us_timestamp_ns"), + + // Precision-specific castDATE for all timestamp types + NativeFunction("castDATE", {}, DataTypeVector{timestamp_sec()}, + date64(), kResultNullIfNull, "castDATE_timestamp_sec"), + NativeFunction("castDATE", {}, DataTypeVector{timestamp_ms()}, + date64(), kResultNullIfNull, "castDATE_timestamp_ms"), + NativeFunction("castDATE", {}, DataTypeVector{timestamp_us()}, + date64(), kResultNullIfNull, "castDATE_timestamp_us"), + NativeFunction("castDATE", {}, DataTypeVector{timestamp_ns()}, + date64(), kResultNullIfNull, "castDATE_timestamp_ns"), + + // Precision-specific castTIME for all timestamp types + NativeFunction("castTIME", {}, DataTypeVector{timestamp_sec()}, + time32(), kResultNullIfNull, "castTIME_timestamp_sec"), + NativeFunction("castTIME", {}, DataTypeVector{timestamp_ms()}, + time32(), kResultNullIfNull, "castTIME_timestamp_ms"), + NativeFunction("castTIME", {}, DataTypeVector{timestamp_us()}, + time32(), kResultNullIfNull, "castTIME_timestamp_us"), + NativeFunction("castTIME", {}, DataTypeVector{timestamp_ns()}, + time32(), kResultNullIfNull, "castTIME_timestamp_ns"), + + // Precision-specific datediff + NativeFunction("datediff", {}, DataTypeVector{timestamp_sec(), timestamp_sec()}, + int32(), kResultNullIfNull, "datediff_timestamp_sec_timestamp_sec"), + NativeFunction("datediff", {}, DataTypeVector{timestamp_ms(), timestamp_ms()}, + int32(), kResultNullIfNull, "datediff_timestamp_ms_timestamp_ms"), + NativeFunction("datediff", {}, DataTypeVector{timestamp_us(), timestamp_us()}, + int32(), kResultNullIfNull, 
"datediff_timestamp_us_timestamp_us"), + NativeFunction("datediff", {}, DataTypeVector{timestamp_ns(), timestamp_ns()}, + int32(), kResultNullIfNull, "datediff_timestamp_ns_timestamp_ns"), + + // Precision-specific months_between + NativeFunction("months_between", {}, DataTypeVector{timestamp_sec(), timestamp_sec()}, + float64(), kResultNullIfNull, "months_between_timestamp_sec_timestamp_sec"), + NativeFunction("months_between", {}, DataTypeVector{timestamp_ms(), timestamp_ms()}, + float64(), kResultNullIfNull, "months_between_timestamp_ms_timestamp_ms"), + NativeFunction("months_between", {}, DataTypeVector{timestamp_us(), timestamp_us()}, + float64(), kResultNullIfNull, "months_between_timestamp_us_timestamp_us"), + NativeFunction("months_between", {}, DataTypeVector{timestamp_ns(), timestamp_ns()}, + float64(), kResultNullIfNull, "months_between_timestamp_ns_timestamp_ns"), + + // Precision-specific last_day + NativeFunction("last_day", {}, DataTypeVector{timestamp_sec()}, + date64(), kResultNullIfNull, "last_day_timestamp_sec"), + NativeFunction("last_day", {}, DataTypeVector{timestamp_ms()}, + date64(), kResultNullIfNull, "last_day_timestamp_ms"), + NativeFunction("last_day", {}, DataTypeVector{timestamp_us()}, + date64(), kResultNullIfNull, "last_day_timestamp_us"), + NativeFunction("last_day", {}, DataTypeVector{timestamp_ns()}, + date64(), kResultNullIfNull, "last_day_timestamp_ns"), + NativeFunction("castDATE", {}, DataTypeVector{utf8()}, date64(), kResultNullIfNull, "castDATE_utf8", NativeFunction::kNeedsContext | NativeFunction::kCanReturnErrors), diff --git a/cpp/src/gandiva/function_registry_timestamp_arithmetic.cc b/cpp/src/gandiva/function_registry_timestamp_arithmetic.cc index 188bd60d9eac..252f2c79d949 100644 --- a/cpp/src/gandiva/function_registry_timestamp_arithmetic.cc +++ b/cpp/src/gandiva/function_registry_timestamp_arithmetic.cc @@ -50,6 +50,34 @@ namespace gandiva { BINARY_GENERIC_SAFE_NULL_IF_NULL(name, ALIASES, date64, int64, date64), \ 
BINARY_GENERIC_SAFE_NULL_IF_NULL(name, ALIASES, timestamp, int64, timestamp) +// Precision-specific timestamp add functions +// Maps to timestampaddSecond_int32_timestamp_sec, etc. +#define TIMESTAMP_ADD_PRECISION_INT32(NAME, ALIASES, TYPE) \ + NativeFunction(#NAME, std::vector ALIASES, \ + DataTypeVector{int32(), TYPE()}, TYPE(), kResultNullIfNull, \ + ARROW_STRINGIFY(NAME##_int32_##TYPE)), \ + NativeFunction(#NAME, std::vector ALIASES, \ + DataTypeVector{TYPE(), int32()}, TYPE(), kResultNullIfNull, \ + ARROW_STRINGIFY(NAME##_##TYPE##_int32)) + +#define TIMESTAMP_ADD_PRECISION_INT64(NAME, ALIASES, TYPE) \ + NativeFunction(#NAME, std::vector ALIASES, \ + DataTypeVector{int64(), TYPE()}, TYPE(), kResultNullIfNull, \ + ARROW_STRINGIFY(NAME##_int64_##TYPE)), \ + NativeFunction(#NAME, std::vector ALIASES, \ + DataTypeVector{TYPE(), int64()}, TYPE(), kResultNullIfNull, \ + ARROW_STRINGIFY(NAME##_##TYPE##_int64)) + +#define TIMESTAMP_ADD_PRECISION_FNS(NAME, ALIASES) \ + TIMESTAMP_ADD_PRECISION_INT32(NAME, ALIASES, timestamp_sec), \ + TIMESTAMP_ADD_PRECISION_INT64(NAME, ALIASES, timestamp_sec), \ + TIMESTAMP_ADD_PRECISION_INT32(NAME, ALIASES, timestamp_ms), \ + TIMESTAMP_ADD_PRECISION_INT64(NAME, ALIASES, timestamp_ms), \ + TIMESTAMP_ADD_PRECISION_INT32(NAME, ALIASES, timestamp_us), \ + TIMESTAMP_ADD_PRECISION_INT64(NAME, ALIASES, timestamp_us), \ + TIMESTAMP_ADD_PRECISION_INT32(NAME, ALIASES, timestamp_ns), \ + TIMESTAMP_ADD_PRECISION_INT64(NAME, ALIASES, timestamp_ns) + std::vector GetDateTimeArithmeticFunctionRegistry() { static std::vector datetime_fn_registry_ = { BINARY_GENERIC_SAFE_NULL_IF_NULL(months_between, {}, date64, date64, float64), @@ -81,7 +109,17 @@ std::vector GetDateTimeArithmeticFunctionRegistry() { DATE_DIFF_FNS(date_sub, {}), DATE_DIFF_FNS(subtract, {}), - DATE_DIFF_FNS(date_diff, {})}; + DATE_DIFF_FNS(date_diff, {}), + + // Precision-specific timestampadd functions + TIMESTAMP_ADD_PRECISION_FNS(timestampaddSecond, {}), + 
TIMESTAMP_ADD_PRECISION_FNS(timestampaddMinute, {}), + TIMESTAMP_ADD_PRECISION_FNS(timestampaddHour, {}), + TIMESTAMP_ADD_PRECISION_FNS(timestampaddDay, {}), + TIMESTAMP_ADD_PRECISION_FNS(timestampaddWeek, {}), + TIMESTAMP_ADD_PRECISION_FNS(timestampaddMonth, {}), + TIMESTAMP_ADD_PRECISION_FNS(timestampaddQuarter, {}), + TIMESTAMP_ADD_PRECISION_FNS(timestampaddYear, {})}; return datetime_fn_registry_; } diff --git a/cpp/src/gandiva/function_signature.cc b/cpp/src/gandiva/function_signature.cc index 6dc6416178e1..f521e5363ac4 100644 --- a/cpp/src/gandiva/function_signature.cc +++ b/cpp/src/gandiva/function_signature.cc @@ -23,6 +23,7 @@ #include #include +#include "arrow/type.h" #include "arrow/util/checked_cast.h" #include "arrow/util/hash_util.h" #include "arrow/util/logging.h" @@ -80,16 +81,58 @@ bool FunctionSignature::operator==(const FunctionSignature& other) const { return true; } +namespace { + +// Helper to get the time unit from temporal types for hashing +// Returns -1 for non-temporal types +int GetTemporalTypeUnit(const DataTypePtr& type) { + switch (type->id()) { + case arrow::Type::TIMESTAMP: { + auto ts_type = checked_cast<const arrow::TimestampType*>(type.get()); + return static_cast<int>(ts_type->unit()); + } + case arrow::Type::TIME32: { + auto t32_type = checked_cast<const arrow::Time32Type*>(type.get()); + return static_cast<int>(t32_type->unit()); + } + case arrow::Type::TIME64: { + auto t64_type = checked_cast<const arrow::Time64Type*>(type.get()); + return static_cast<int>(t64_type->unit()); + } + case arrow::Type::DURATION: { + auto dur_type = checked_cast<const arrow::DurationType*>(type.get()); + return static_cast<int>(dur_type->unit()); + } + default: + return -1; + } +} + +} // namespace + /// calculated based on name, datatype id of parameters and datatype id -/// of return type. +/// of return type. For temporal types (TIMESTAMP, TIME32, TIME64, DURATION), +/// also includes the time unit to distinguish different precisions. 
std::size_t FunctionSignature::Hash() const { static const size_t kSeedValue = 17; size_t result = kSeedValue; hash_combine(result, AsciiToLower(base_name_)); hash_combine(result, static_cast<size_t>(ret_type_->id())); + + // Include time unit for temporal return types + int ret_unit = GetTemporalTypeUnit(ret_type_); + if (ret_unit >= 0) { + hash_combine(result, static_cast<size_t>(ret_unit)); + } + // not using hash_range since we only want to include the id from the data type for (auto& param_type : param_types_) { hash_combine(result, static_cast<size_t>(param_type->id())); + // Include time unit for temporal parameter types + int param_unit = GetTemporalTypeUnit(param_type); + if (param_unit >= 0) { + hash_combine(result, static_cast<size_t>(param_unit)); + } } return result; } diff --git a/cpp/src/gandiva/function_signature_test.cc b/cpp/src/gandiva/function_signature_test.cc index 0eb62d4e7bfb..799880f2af4c 100644 --- a/cpp/src/gandiva/function_signature_test.cc +++ b/cpp/src/gandiva/function_signature_test.cc @@ -110,4 +110,91 @@ TEST_F(TestFunctionSignature, TestHash) { EXPECT_EQ(f3.Hash(), f4.Hash()); } +TEST_F(TestFunctionSignature, TestTimestampPrecisionHash) { + // Different timestamp precisions should have different hashes + FunctionSignature ts_sec("extractYear", + {arrow::timestamp(arrow::TimeUnit::SECOND)}, + arrow::int64()); + FunctionSignature ts_ms("extractYear", + {arrow::timestamp(arrow::TimeUnit::MILLI)}, + arrow::int64()); + FunctionSignature ts_us("extractYear", + {arrow::timestamp(arrow::TimeUnit::MICRO)}, + arrow::int64()); + FunctionSignature ts_ns("extractYear", + {arrow::timestamp(arrow::TimeUnit::NANO)}, + arrow::int64()); + + // All should have different hashes + EXPECT_NE(ts_sec.Hash(), ts_ms.Hash()); + EXPECT_NE(ts_sec.Hash(), ts_us.Hash()); + EXPECT_NE(ts_sec.Hash(), ts_ns.Hash()); + EXPECT_NE(ts_ms.Hash(), ts_us.Hash()); + EXPECT_NE(ts_ms.Hash(), ts_ns.Hash()); + EXPECT_NE(ts_us.Hash(), ts_ns.Hash()); + + // Same precision should have same hash + 
FunctionSignature ts_sec2("extractYear", + {arrow::timestamp(arrow::TimeUnit::SECOND)}, + arrow::int64()); + EXPECT_EQ(ts_sec.Hash(), ts_sec2.Hash()); +} + +TEST_F(TestFunctionSignature, TestTimestampPrecisionEquals) { + // Different timestamp precisions should NOT be equal + FunctionSignature ts_sec("extractYear", + {arrow::timestamp(arrow::TimeUnit::SECOND)}, + arrow::int64()); + FunctionSignature ts_ms("extractYear", + {arrow::timestamp(arrow::TimeUnit::MILLI)}, + arrow::int64()); + + EXPECT_FALSE(ts_sec == ts_ms); + + // Same precision should be equal + FunctionSignature ts_sec2("extractYear", + {arrow::timestamp(arrow::TimeUnit::SECOND)}, + arrow::int64()); + EXPECT_EQ(ts_sec, ts_sec2); +} + +TEST_F(TestFunctionSignature, TestTimestampReturnTypeHash) { + // Return type precision should also be distinguished + FunctionSignature ret_ms("castTimestamp", + {arrow::utf8()}, + arrow::timestamp(arrow::TimeUnit::MILLI)); + FunctionSignature ret_ns("castTimestamp", + {arrow::utf8()}, + arrow::timestamp(arrow::TimeUnit::NANO)); + + EXPECT_NE(ret_ms.Hash(), ret_ns.Hash()); + EXPECT_FALSE(ret_ms == ret_ns); +} + +TEST_F(TestFunctionSignature, TestTime32PrecisionHash) { + // Time32 types should also have precision-aware hashing + FunctionSignature t32_sec("extractHour", + {arrow::time32(arrow::TimeUnit::SECOND)}, + arrow::int64()); + FunctionSignature t32_ms("extractHour", + {arrow::time32(arrow::TimeUnit::MILLI)}, + arrow::int64()); + + EXPECT_NE(t32_sec.Hash(), t32_ms.Hash()); + EXPECT_FALSE(t32_sec == t32_ms); +} + +TEST_F(TestFunctionSignature, TestTime64PrecisionHash) { + // Time64 types should also have precision-aware hashing + FunctionSignature t64_us("extractHour", + {arrow::time64(arrow::TimeUnit::MICRO)}, + arrow::int64()); + FunctionSignature t64_ns("extractHour", + {arrow::time64(arrow::TimeUnit::NANO)}, + arrow::int64()); + + EXPECT_NE(t64_us.Hash(), t64_ns.Hash()); + EXPECT_FALSE(t64_us == t64_ns); +} + } // namespace gandiva diff --git 
a/cpp/src/gandiva/precompiled/epoch_time_point.h b/cpp/src/gandiva/precompiled/epoch_time_point.h index 45cfb28ca38c..b8e48e1ae8a9 100644 --- a/cpp/src/gandiva/precompiled/epoch_time_point.h +++ b/cpp/src/gandiva/precompiled/epoch_time_point.h @@ -24,14 +24,20 @@ bool is_leap_year(int yy); bool did_days_overflow(arrow_vendored::date::year_month_day ymd); int last_possible_day_in_month(int month, int year); -// A point of time measured in millis since epoch. -class EpochTimePoint { +// Template class for precision-aware time point operations. +// Duration should be one of: std::chrono::seconds, milliseconds, microseconds, nanoseconds +template <typename Duration> +class EpochTimePointT { public: - explicit EpochTimePoint(std::chrono::milliseconds millis_since_epoch) : tp_(millis_since_epoch) {} + using duration_type = Duration; + using time_point_type = + std::chrono::time_point<std::chrono::system_clock, Duration>; - explicit EpochTimePoint(int64_t millis_since_epoch) : EpochTimePoint(std::chrono::milliseconds(millis_since_epoch)) {} + explicit EpochTimePointT(Duration duration_since_epoch) + : tp_(duration_since_epoch) {} + + explicit EpochTimePointT(int64_t value_since_epoch) + : EpochTimePointT(Duration(value_since_epoch)) {} int TmYear() const { return static_cast<int>(YearMonthDay().year()) - 1900; } @@ -62,19 +68,29 @@ class EpochTimePoint { return static_cast<int>(TimeOfDay().seconds().count()); } - EpochTimePoint AddYears(int num_years) const { + // Returns sub-second component in the native duration unit + // For milliseconds: returns 0-999 + // For microseconds: returns 0-999999 + // For nanoseconds: returns 0-999999999 + int64_t SubSeconds() const { + auto since_midnight = tp_ - arrow_vendored::date::floor<arrow_vendored::date::days>(tp_); + auto secs = std::chrono::duration_cast<std::chrono::seconds>(since_midnight); + return (since_midnight - secs).count(); + } + + EpochTimePointT AddYears(int num_years) const { auto ymd = YearMonthDay() + arrow_vendored::date::years(num_years); - return EpochTimePoint((arrow_vendored::date::sys_days{ymd} + // NOLINT - 
TimeOfDay().to_duration()) .time_since_epoch()); + return EpochTimePointT( + std::chrono::duration_cast<Duration>( + (arrow_vendored::date::sys_days{ymd} + TimeOfDayDuration()).time_since_epoch())); } - EpochTimePoint AddMonths(int num_months) const { auto ymd = YearMonthDay() + arrow_vendored::date::months(num_months); - EpochTimePoint tp = EpochTimePoint((arrow_vendored::date::sys_days{ymd} + // NOLINT - TimeOfDay().to_duration()) - .time_since_epoch()); + EpochTimePointT tp( + std::chrono::duration_cast<Duration>( + (arrow_vendored::date::sys_days{ymd} + TimeOfDayDuration()).time_since_epoch())); if (did_days_overflow(ymd)) { int days_to_offset = @@ -86,26 +102,36 @@ class EpochTimePoint { return tp; } - EpochTimePoint AddDays(int num_days) const { - auto days_since_epoch = arrow_vendored::date::sys_days{YearMonthDay()} + // NOLINT + EpochTimePointT AddDays(int num_days) const { + auto days_since_epoch = arrow_vendored::date::sys_days{YearMonthDay()} + arrow_vendored::date::days(num_days); - return EpochTimePoint( - (days_since_epoch + TimeOfDay().to_duration()).time_since_epoch()); + return EpochTimePointT( + std::chrono::duration_cast<Duration>( + (days_since_epoch + TimeOfDayDuration()).time_since_epoch())); } - EpochTimePoint ClearTimeOfDay() const { - return EpochTimePoint((tp_ - TimeOfDay().to_duration()).time_since_epoch()); + EpochTimePointT ClearTimeOfDay() const { + return EpochTimePointT( + std::chrono::duration_cast<Duration>( + (tp_ - TimeOfDayDuration()).time_since_epoch())); } - bool operator==(const EpochTimePoint& other) const { return tp_ == other.tp_; } + bool operator==(const EpochTimePointT& other) const { return tp_ == other.tp_; } - int64_t MillisSinceEpoch() const { return tp_.time_since_epoch().count(); } + // Returns the value in the native duration unit + int64_t ValueSinceEpoch() const { return tp_.time_since_epoch().count(); } + + // For backward compatibility with existing code expecting milliseconds + int64_t 
MillisSinceEpoch() const { + return std::chrono::duration_cast<std::chrono::milliseconds>( + tp_.time_since_epoch()) + .count(); + } - arrow_vendored::date::time_of_day<std::chrono::milliseconds> TimeOfDay() const { - auto millis_since_midnight = + arrow_vendored::date::time_of_day<Duration> TimeOfDay() const { + auto duration_since_midnight = tp_ - arrow_vendored::date::floor<arrow_vendored::date::days>(tp_); - return arrow_vendored::date::time_of_day<std::chrono::milliseconds>( - millis_since_midnight); + return arrow_vendored::date::time_of_day<Duration>(duration_since_midnight); } private: @@ -114,5 +140,19 @@ class EpochTimePoint { arrow_vendored::date::floor<arrow_vendored::date::days>(tp_)}; // NOLINT } - std::chrono::time_point<std::chrono::system_clock, std::chrono::milliseconds> tp_; + // Returns time of day as a duration for arithmetic operations + Duration TimeOfDayDuration() const { + return tp_ - arrow_vendored::date::floor<arrow_vendored::date::days>(tp_); + } + + time_point_type tp_; }; + +// Type aliases for each precision level +using EpochTimePointSec = EpochTimePointT<std::chrono::seconds>; +using EpochTimePointMilli = EpochTimePointT<std::chrono::milliseconds>; +using EpochTimePointMicro = EpochTimePointT<std::chrono::microseconds>; +using EpochTimePointNano = EpochTimePointT<std::chrono::nanoseconds>; + +// Backward compatibility: existing code uses EpochTimePoint with milliseconds +using EpochTimePoint = EpochTimePointMilli; diff --git a/cpp/src/gandiva/precompiled/epoch_time_point_test.cc b/cpp/src/gandiva/precompiled/epoch_time_point_test.cc index 9180aac07634..cdc493da8e75 100644 --- a/cpp/src/gandiva/precompiled/epoch_time_point_test.cc +++ b/cpp/src/gandiva/precompiled/epoch_time_point_test.cc @@ -100,4 +100,113 @@ TEST(TestEpochTimePoint, TestClearTimeOfDay) { EpochTimePoint(StringToTimestamp("2015-05-05 00:00:00"))); } +// Tests for precision-aware EpochTimePointT template +TEST(TestEpochTimePointT, TestAllPrecisionsExtractYear) { + // Test date: 2023-06-15 14:30:45.123456789 + // Unix epoch seconds: 1686839445 + + // timestamp[s] - seconds precision + EpochTimePointSec tp_sec(1686839445LL); + EXPECT_EQ(tp_sec.TmYear() + 1900, 2023); + EXPECT_EQ(tp_sec.TmMon() + 1, 6); + EXPECT_EQ(tp_sec.TmMday(), 15); + EXPECT_EQ(tp_sec.TmHour(), 14); + EXPECT_EQ(tp_sec.TmMin(), 30); + 
EXPECT_EQ(tp_sec.TmSec(), 45); + + // timestamp[ms] - milliseconds precision + EpochTimePointMilli tp_ms(1686839445123LL); + EXPECT_EQ(tp_ms.TmYear() + 1900, 2023); + EXPECT_EQ(tp_ms.TmMon() + 1, 6); + EXPECT_EQ(tp_ms.TmMday(), 15); + EXPECT_EQ(tp_ms.TmHour(), 14); + EXPECT_EQ(tp_ms.TmMin(), 30); + EXPECT_EQ(tp_ms.TmSec(), 45); + + // timestamp[us] - microseconds precision + EpochTimePointMicro tp_us(1686839445123456LL); + EXPECT_EQ(tp_us.TmYear() + 1900, 2023); + EXPECT_EQ(tp_us.TmMon() + 1, 6); + EXPECT_EQ(tp_us.TmMday(), 15); + EXPECT_EQ(tp_us.TmHour(), 14); + EXPECT_EQ(tp_us.TmMin(), 30); + EXPECT_EQ(tp_us.TmSec(), 45); + + // timestamp[ns] - nanoseconds precision + EpochTimePointNano tp_ns(1686839445123456789LL); + EXPECT_EQ(tp_ns.TmYear() + 1900, 2023); + EXPECT_EQ(tp_ns.TmMon() + 1, 6); + EXPECT_EQ(tp_ns.TmMday(), 15); + EXPECT_EQ(tp_ns.TmHour(), 14); + EXPECT_EQ(tp_ns.TmMin(), 30); + EXPECT_EQ(tp_ns.TmSec(), 45); +} + +TEST(TestEpochTimePointT, TestSubSeconds) { + // timestamp[ms] - subseconds returns milliseconds (0-999) + EpochTimePointMilli tp_ms(1686839445123LL); + EXPECT_EQ(tp_ms.SubSeconds(), 123); + + // timestamp[us] - subseconds returns microseconds (0-999999) + EpochTimePointMicro tp_us(1686839445123456LL); + EXPECT_EQ(tp_us.SubSeconds(), 123456); + + // timestamp[ns] - subseconds returns nanoseconds (0-999999999) + EpochTimePointNano tp_ns(1686839445123456789LL); + EXPECT_EQ(tp_ns.SubSeconds(), 123456789); +} + +TEST(TestEpochTimePointT, TestValueSinceEpoch) { + // Each precision should return the original value + EpochTimePointSec tp_sec(1686839445LL); + EXPECT_EQ(tp_sec.ValueSinceEpoch(), 1686839445LL); + + EpochTimePointMilli tp_ms(1686839445123LL); + EXPECT_EQ(tp_ms.ValueSinceEpoch(), 1686839445123LL); + + EpochTimePointMicro tp_us(1686839445123456LL); + EXPECT_EQ(tp_us.ValueSinceEpoch(), 1686839445123456LL); + + EpochTimePointNano tp_ns(1686839445123456789LL); + EXPECT_EQ(tp_ns.ValueSinceEpoch(), 1686839445123456789LL); +} + 
+TEST(TestEpochTimePointT, TestMillisSinceEpoch) { + // All precisions should convert to milliseconds correctly + EpochTimePointSec tp_sec(1686839445LL); + EXPECT_EQ(tp_sec.MillisSinceEpoch(), 1686839445000LL); + + EpochTimePointMilli tp_ms(1686839445123LL); + EXPECT_EQ(tp_ms.MillisSinceEpoch(), 1686839445123LL); + + EpochTimePointMicro tp_us(1686839445123456LL); + EXPECT_EQ(tp_us.MillisSinceEpoch(), 1686839445123LL); + + EpochTimePointNano tp_ns(1686839445123456789LL); + EXPECT_EQ(tp_ns.MillisSinceEpoch(), 1686839445123LL); +} + +TEST(TestEpochTimePointT, TestAddDaysPreservesPrecision) { + // Adding days should preserve sub-day precision + EpochTimePointMicro tp_us(1686839445123456LL); // 2023-06-15 14:30:45.123456 + auto tp_us_plus2 = tp_us.AddDays(2); + EXPECT_EQ(tp_us_plus2.TmMday(), 17); + // Sub-second portion should be preserved + EXPECT_EQ(tp_us_plus2.SubSeconds(), 123456); + + EpochTimePointNano tp_ns(1686839445123456789LL); + auto tp_ns_plus2 = tp_ns.AddDays(2); + EXPECT_EQ(tp_ns_plus2.TmMday(), 17); + EXPECT_EQ(tp_ns_plus2.SubSeconds(), 123456789); +} + +TEST(TestEpochTimePointT, TestNegativeTimestamps) { + // Before Unix epoch: 1960-06-15 00:00:00 + // Unix epoch seconds: approximately -301017600 + EpochTimePointSec tp_sec(-301017600LL); + EXPECT_EQ(tp_sec.TmYear() + 1900, 1960); + EXPECT_EQ(tp_sec.TmMon() + 1, 6); + EXPECT_EQ(tp_sec.TmMday(), 15); +} + } // namespace gandiva diff --git a/cpp/src/gandiva/precompiled/time.cc b/cpp/src/gandiva/precompiled/time.cc index ecfff4fe72a0..641929b4715c 100644 --- a/cpp/src/gandiva/precompiled/time.cc +++ b/cpp/src/gandiva/precompiled/time.cc @@ -99,6 +99,88 @@ DATE_TYPES(EXTRACT_DECADE) DATE_TYPES(EXTRACT_YEAR) +// Precision-aware extractYear for all timestamp units +FORCE_INLINE +gdv_int64 extractYear_timestamp_sec(gdv_timestamp_sec secs) { + EpochTimePointSec tp(secs); + return 1900 + tp.TmYear(); +} + +FORCE_INLINE +gdv_int64 extractYear_timestamp_ms(gdv_timestamp_ms millis) { + EpochTimePointMilli 
tp(millis); + return 1900 + tp.TmYear(); +} + +FORCE_INLINE +gdv_int64 extractYear_timestamp_us(gdv_timestamp_us micros) { + EpochTimePointMicro tp(micros); + return 1900 + tp.TmYear(); +} + +FORCE_INLINE +gdv_int64 extractYear_timestamp_ns(gdv_timestamp_ns nanos) { + EpochTimePointNano tp(nanos); + return 1900 + tp.TmYear(); +} + +// Macro to generate precision-aware extract functions for all timestamp units +#define EXTRACT_TIMESTAMP_PRECISION_FNS(EXTRACT_NAME, EXPR) \ + FORCE_INLINE \ + gdv_int64 EXTRACT_NAME##_timestamp_sec(gdv_timestamp_sec secs) { \ + EpochTimePointSec tp(secs); \ + return EXPR; \ + } \ + FORCE_INLINE \ + gdv_int64 EXTRACT_NAME##_timestamp_ms(gdv_timestamp_ms millis) { \ + EpochTimePointMilli tp(millis); \ + return EXPR; \ + } \ + FORCE_INLINE \ + gdv_int64 EXTRACT_NAME##_timestamp_us(gdv_timestamp_us micros) { \ + EpochTimePointMicro tp(micros); \ + return EXPR; \ + } \ + FORCE_INLINE \ + gdv_int64 EXTRACT_NAME##_timestamp_ns(gdv_timestamp_ns nanos) { \ + EpochTimePointNano tp(nanos); \ + return EXPR; \ + } + +// Precision-aware extract functions for all timestamp units +EXTRACT_TIMESTAMP_PRECISION_FNS(extractMonth, 1 + tp.TmMon()) +EXTRACT_TIMESTAMP_PRECISION_FNS(extractDay, tp.TmMday()) +EXTRACT_TIMESTAMP_PRECISION_FNS(extractHour, tp.TmHour()) +EXTRACT_TIMESTAMP_PRECISION_FNS(extractMinute, tp.TmMin()) +EXTRACT_TIMESTAMP_PRECISION_FNS(extractSecond, tp.TmSec()) +EXTRACT_TIMESTAMP_PRECISION_FNS(extractDoy, 1 + tp.TmYday()) +EXTRACT_TIMESTAMP_PRECISION_FNS(extractDow, 1 + tp.TmWday()) +EXTRACT_TIMESTAMP_PRECISION_FNS(extractQuarter, tp.TmMon() / 3 + 1) +EXTRACT_TIMESTAMP_PRECISION_FNS(extractMillennium, (1900 + tp.TmYear() - 1) / 1000 + 1) +EXTRACT_TIMESTAMP_PRECISION_FNS(extractCentury, (1900 + tp.TmYear() - 1) / 100 + 1) +EXTRACT_TIMESTAMP_PRECISION_FNS(extractDecade, (1900 + tp.TmYear()) / 10) + +// Precision-aware extractEpoch - returns seconds since epoch +FORCE_INLINE +gdv_int64 extractEpoch_timestamp_sec(gdv_timestamp_sec secs) { + 
return secs; +} + +FORCE_INLINE +gdv_int64 extractEpoch_timestamp_ms(gdv_timestamp_ms millis) { + return MILLIS_TO_SEC(millis); +} + +FORCE_INLINE +gdv_int64 extractEpoch_timestamp_us(gdv_timestamp_us micros) { + return MICROS_TO_SEC(micros); +} + +FORCE_INLINE +gdv_int64 extractEpoch_timestamp_ns(gdv_timestamp_ns nanos) { + return NANOS_TO_SEC(nanos); +} + #define EXTRACT_DOY(TYPE) \ FORCE_INLINE \ gdv_int64 extractDoy##_##TYPE(gdv_##TYPE millis) { \ @@ -365,6 +447,32 @@ gdv_int64 weekOfYear(const EpochTimePoint& tp) { DATE_TYPES(EXTRACT_WEEK) +// Precision-aware extractWeek for all timestamp units +// Week calculation uses EpochTimePoint (millis) since it doesn't need sub-ms precision +FORCE_INLINE +gdv_int64 extractWeek_timestamp_sec(gdv_timestamp_sec secs) { + EpochTimePoint tp(SECS_TO_MILLIS(secs)); + return weekOfYear(tp); +} + +FORCE_INLINE +gdv_int64 extractWeek_timestamp_ms(gdv_timestamp_ms millis) { + EpochTimePoint tp(millis); + return weekOfYear(tp); +} + +FORCE_INLINE +gdv_int64 extractWeek_timestamp_us(gdv_timestamp_us micros) { + EpochTimePoint tp(MICROS_TO_MILLIS(micros)); + return weekOfYear(tp); +} + +FORCE_INLINE +gdv_int64 extractWeek_timestamp_ns(gdv_timestamp_ns nanos) { + EpochTimePoint tp(NANOS_TO_MILLIS(nanos)); + return weekOfYear(tp); +} + #define EXTRACT_DOW(TYPE) \ FORCE_INLINE \ gdv_int64 extractDow##_##TYPE(gdv_##TYPE millis) { \ @@ -508,6 +616,135 @@ EXTRACT_HOUR_TIME(time32) DATE_TRUNC_FUNCTIONS(date64) DATE_TRUNC_FUNCTIONS(timestamp) +// Precision-aware date_trunc macros +// For fixed time units (second, minute, hour, day) +#define DATE_TRUNC_FIXED_UNIT_PRECISION(NAME, SUFFIX, TP_TYPE, UNIT_IN_PRECISION, TYPE_ALIAS) \ + FORCE_INLINE \ + TYPE_ALIAS NAME##_timestamp_##SUFFIX(TYPE_ALIAS value) { \ + return value >= 0 \ + ? 
((value / UNIT_IN_PRECISION) * UNIT_IN_PRECISION) \ + : (((value - UNIT_IN_PRECISION + 1) / UNIT_IN_PRECISION) * UNIT_IN_PRECISION);\ + } + +// For week truncation +#define DATE_TRUNC_WEEK_PRECISION(SUFFIX, TP_TYPE, TYPE_ALIAS, CONVERSION_TO_MILLIS, CONVERSION_FROM_MILLIS) \ + FORCE_INLINE \ + TYPE_ALIAS date_trunc_Week_timestamp_##SUFFIX(TYPE_ALIAS value) { \ + EpochTimePoint tp(CONVERSION_TO_MILLIS(value)); \ + int ndays_to_trunc = 0; \ + if (tp.TmWday() == 0) { \ + ndays_to_trunc = 6; \ + } else { \ + ndays_to_trunc = tp.TmWday() - 1; \ + } \ + return CONVERSION_FROM_MILLIS(tp.AddDays(-ndays_to_trunc).ClearTimeOfDay().MillisSinceEpoch()); \ + } + +// For month-based truncation (month, quarter, year) +#define DATE_TRUNC_MONTH_UNITS_PRECISION(NAME, SUFFIX, TP_TYPE, TYPE_ALIAS, NMONTHS_IN_UNIT, \ + CONVERSION_TO_MILLIS, CONVERSION_FROM_MILLIS) \ + FORCE_INLINE \ + TYPE_ALIAS NAME##_timestamp_##SUFFIX(TYPE_ALIAS value) { \ + EpochTimePoint tp(CONVERSION_TO_MILLIS(value)); \ + int ndays_to_trunc = tp.TmMday() - 1; \ + int nmonths_to_trunc = (tp.TmMon() % NMONTHS_IN_UNIT); \ + return CONVERSION_FROM_MILLIS(tp.AddDays(-ndays_to_trunc) \ + .AddMonths(-nmonths_to_trunc) \ + .ClearTimeOfDay() \ + .MillisSinceEpoch()); \ + } + +// For year-based truncation (decade, century, millennium) +#define DATE_TRUNC_YEAR_UNITS_PRECISION(NAME, SUFFIX, TP_TYPE, TYPE_ALIAS, NYEARS_IN_UNIT, OFF_BY, \ + CONVERSION_TO_MILLIS, CONVERSION_FROM_MILLIS) \ + FORCE_INLINE \ + TYPE_ALIAS NAME##_timestamp_##SUFFIX(TYPE_ALIAS value) { \ + EpochTimePoint tp(CONVERSION_TO_MILLIS(value)); \ + int ndays_to_trunc = tp.TmMday() - 1; \ + int nmonths_to_trunc = tp.TmMon(); \ + int nyears_to_trunc = ((1900 + tp.TmYear() - OFF_BY) % NYEARS_IN_UNIT); \ + return CONVERSION_FROM_MILLIS(tp.AddDays(-ndays_to_trunc) \ + .AddMonths(-nmonths_to_trunc) \ + .AddYears(-nyears_to_trunc) \ + .ClearTimeOfDay() \ + .MillisSinceEpoch()); \ + } + +// Seconds precision functions 
+DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Second, sec, EpochTimePointSec, 1, gdv_timestamp_sec) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Minute, sec, EpochTimePointSec, SECS_IN_MIN, gdv_timestamp_sec) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Hour, sec, EpochTimePointSec, SECS_IN_HOUR, gdv_timestamp_sec) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Day, sec, EpochTimePointSec, SECS_IN_DAY, gdv_timestamp_sec) +DATE_TRUNC_WEEK_PRECISION(sec, EpochTimePointSec, gdv_timestamp_sec, SECS_TO_MILLIS, MILLIS_TO_SEC) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Month, sec, EpochTimePointSec, gdv_timestamp_sec, 1, SECS_TO_MILLIS, MILLIS_TO_SEC) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Quarter, sec, EpochTimePointSec, gdv_timestamp_sec, 3, SECS_TO_MILLIS, MILLIS_TO_SEC) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Year, sec, EpochTimePointSec, gdv_timestamp_sec, 12, SECS_TO_MILLIS, MILLIS_TO_SEC) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Decade, sec, EpochTimePointSec, gdv_timestamp_sec, 10, 0, SECS_TO_MILLIS, MILLIS_TO_SEC) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Century, sec, EpochTimePointSec, gdv_timestamp_sec, 100, 1, SECS_TO_MILLIS, MILLIS_TO_SEC) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Millennium, sec, EpochTimePointSec, gdv_timestamp_sec, 1000, 1, SECS_TO_MILLIS, MILLIS_TO_SEC) + +// Milliseconds precision functions (existing behavior, but explicitly named) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Second, ms, EpochTimePointMilli, MILLIS_IN_SEC, gdv_timestamp_ms) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Minute, ms, EpochTimePointMilli, MILLIS_IN_MIN, gdv_timestamp_ms) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Hour, ms, EpochTimePointMilli, MILLIS_IN_HOUR, gdv_timestamp_ms) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Day, ms, EpochTimePointMilli, MILLIS_IN_DAY, gdv_timestamp_ms) +DATE_TRUNC_WEEK_PRECISION(ms, EpochTimePointMilli, gdv_timestamp_ms, , ) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Month, ms, EpochTimePointMilli, 
gdv_timestamp_ms, 1, , ) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Quarter, ms, EpochTimePointMilli, gdv_timestamp_ms, 3, , ) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Year, ms, EpochTimePointMilli, gdv_timestamp_ms, 12, , ) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Decade, ms, EpochTimePointMilli, gdv_timestamp_ms, 10, 0, , ) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Century, ms, EpochTimePointMilli, gdv_timestamp_ms, 100, 1, , ) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Millennium, ms, EpochTimePointMilli, gdv_timestamp_ms, 1000, 1, , ) + +// Microseconds precision functions +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Second, us, EpochTimePointMicro, MICROS_IN_SEC, gdv_timestamp_us) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Minute, us, EpochTimePointMicro, MICROS_IN_MIN, gdv_timestamp_us) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Hour, us, EpochTimePointMicro, MICROS_IN_HOUR, gdv_timestamp_us) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Day, us, EpochTimePointMicro, MICROS_IN_DAY, gdv_timestamp_us) +DATE_TRUNC_WEEK_PRECISION(us, EpochTimePointMicro, gdv_timestamp_us, MICROS_TO_MILLIS, MILLIS_TO_MICROS) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Month, us, EpochTimePointMicro, gdv_timestamp_us, 1, MICROS_TO_MILLIS, MILLIS_TO_MICROS) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Quarter, us, EpochTimePointMicro, gdv_timestamp_us, 3, MICROS_TO_MILLIS, MILLIS_TO_MICROS) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Year, us, EpochTimePointMicro, gdv_timestamp_us, 12, MICROS_TO_MILLIS, MILLIS_TO_MICROS) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Decade, us, EpochTimePointMicro, gdv_timestamp_us, 10, 0, MICROS_TO_MILLIS, MILLIS_TO_MICROS) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Century, us, EpochTimePointMicro, gdv_timestamp_us, 100, 1, MICROS_TO_MILLIS, MILLIS_TO_MICROS) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Millennium, us, EpochTimePointMicro, gdv_timestamp_us, 1000, 1, MICROS_TO_MILLIS, MILLIS_TO_MICROS) + +// Nanoseconds precision 
functions +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Second, ns, EpochTimePointNano, NANOS_IN_SEC, gdv_timestamp_ns) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Minute, ns, EpochTimePointNano, NANOS_IN_MIN, gdv_timestamp_ns) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Hour, ns, EpochTimePointNano, NANOS_IN_HOUR, gdv_timestamp_ns) +DATE_TRUNC_FIXED_UNIT_PRECISION(date_trunc_Day, ns, EpochTimePointNano, NANOS_IN_DAY, gdv_timestamp_ns) +DATE_TRUNC_WEEK_PRECISION(ns, EpochTimePointNano, gdv_timestamp_ns, NANOS_TO_MILLIS, MILLIS_TO_NANOS) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Month, ns, EpochTimePointNano, gdv_timestamp_ns, 1, NANOS_TO_MILLIS, MILLIS_TO_NANOS) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Quarter, ns, EpochTimePointNano, gdv_timestamp_ns, 3, NANOS_TO_MILLIS, MILLIS_TO_NANOS) +DATE_TRUNC_MONTH_UNITS_PRECISION(date_trunc_Year, ns, EpochTimePointNano, gdv_timestamp_ns, 12, NANOS_TO_MILLIS, MILLIS_TO_NANOS) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Decade, ns, EpochTimePointNano, gdv_timestamp_ns, 10, 0, NANOS_TO_MILLIS, MILLIS_TO_NANOS) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Century, ns, EpochTimePointNano, gdv_timestamp_ns, 100, 1, NANOS_TO_MILLIS, MILLIS_TO_NANOS) +DATE_TRUNC_YEAR_UNITS_PRECISION(date_trunc_Millennium, ns, EpochTimePointNano, gdv_timestamp_ns, 1000, 1, NANOS_TO_MILLIS, MILLIS_TO_NANOS) + +// New: Millisecond truncation for us/ns (truncate to millisecond boundary) +FORCE_INLINE +gdv_timestamp_us date_trunc_Millisecond_timestamp_us(gdv_timestamp_us micros) { + return micros >= 0 + ? ((micros / MICROS_IN_MILLI) * MICROS_IN_MILLI) + : (((micros - MICROS_IN_MILLI + 1) / MICROS_IN_MILLI) * MICROS_IN_MILLI); +} + +FORCE_INLINE +gdv_timestamp_ns date_trunc_Millisecond_timestamp_ns(gdv_timestamp_ns nanos) { + return nanos >= 0 + ? 
((nanos / NANOS_IN_MILLI) * NANOS_IN_MILLI) + : (((nanos - NANOS_IN_MILLI + 1) / NANOS_IN_MILLI) * NANOS_IN_MILLI); +} + +// New: Microsecond truncation for ns (truncate to microsecond boundary) +FORCE_INLINE +gdv_timestamp_ns date_trunc_Microsecond_timestamp_ns(gdv_timestamp_ns nanos) { + return nanos >= 0 + ? ((nanos / NANOS_IN_MICRO) * NANOS_IN_MICRO) + : (((nanos - NANOS_IN_MICRO + 1) / NANOS_IN_MICRO) * NANOS_IN_MICRO); +} + #define LAST_DAY_FUNC(TYPE) \ FORCE_INLINE \ gdv_date64 last_day_from_##TYPE(gdv_date64 millis) { \ @@ -619,6 +856,50 @@ static inline int32_t normalize_subseconds_to_millis(int32_t subseconds, DATE_TYPES(MONTHS_BETWEEN) +// Precision-aware months_between +// Note: For precision, we convert to milliseconds for the calendar calculation +// since months_between is a calendar operation, not a time-precise operation +FORCE_INLINE +double months_between_timestamp_sec_timestamp_sec(uint64_t endEpoch, uint64_t startEpoch) { + return months_between_timestamp_timestamp(SECS_TO_MILLIS(endEpoch), SECS_TO_MILLIS(startEpoch)); +} + +FORCE_INLINE +double months_between_timestamp_ms_timestamp_ms(uint64_t endEpoch, uint64_t startEpoch) { + return months_between_timestamp_timestamp(endEpoch, startEpoch); +} + +// The us/ns variants take signed parameters: the downscaling division must be a +// signed (floor-toward-zero on signed values) division, otherwise pre-epoch +// (negative) timestamps reinterpreted as uint64_t divide to a huge wrong value. +FORCE_INLINE +double months_between_timestamp_us_timestamp_us(gdv_timestamp_us endEpoch, gdv_timestamp_us startEpoch) { + return months_between_timestamp_timestamp(MICROS_TO_MILLIS(endEpoch), MICROS_TO_MILLIS(startEpoch)); +} + +FORCE_INLINE +double months_between_timestamp_ns_timestamp_ns(gdv_timestamp_ns endEpoch, gdv_timestamp_ns startEpoch) { + return months_between_timestamp_timestamp(NANOS_TO_MILLIS(endEpoch), NANOS_TO_MILLIS(startEpoch)); +} + +// Precision-aware last_day +FORCE_INLINE +gdv_date64 last_day_timestamp_sec(gdv_timestamp_sec secs) { + return last_day_from_timestamp(SECS_TO_MILLIS(secs)); +} + +FORCE_INLINE +gdv_date64 last_day_timestamp_ms(gdv_timestamp_ms millis) { + return last_day_from_timestamp(millis); +} + +FORCE_INLINE +gdv_date64 
last_day_timestamp_us(gdv_timestamp_us micros) { + return last_day_from_timestamp(MICROS_TO_MILLIS(micros)); +} + +FORCE_INLINE +gdv_date64 last_day_timestamp_ns(gdv_timestamp_ns nanos) { + return last_day_from_timestamp(NANOS_TO_MILLIS(nanos)); +} + FORCE_INLINE void set_error_for_date(gdv_int32 length, const char* input, const char* msg, int64_t execution_context) { @@ -1058,4 +1339,148 @@ gdv_int32 datediff_timestamp_timestamp(gdv_timestamp start_millis, CAST_NULLABLE_INTERVAL_YEAR(int32) CAST_NULLABLE_INTERVAL_YEAR(int64) +// ============================================================================ +// Precision-aware cast and conversion functions +// ============================================================================ + +// Cast between timestamp precisions +FORCE_INLINE +gdv_timestamp_ms castTIMESTAMP_ms_timestamp_sec(gdv_timestamp_sec secs) { + return SECS_TO_MILLIS(secs); +} + +FORCE_INLINE +gdv_timestamp_us castTIMESTAMP_us_timestamp_sec(gdv_timestamp_sec secs) { + return SECS_TO_MICROS(secs); +} + +FORCE_INLINE +gdv_timestamp_ns castTIMESTAMP_ns_timestamp_sec(gdv_timestamp_sec secs) { + return SECS_TO_NANOS(secs); +} + +FORCE_INLINE +gdv_timestamp_sec castTIMESTAMP_sec_timestamp_ms(gdv_timestamp_ms millis) { + return MILLIS_TO_SEC(millis); +} + +FORCE_INLINE +gdv_timestamp_us castTIMESTAMP_us_timestamp_ms(gdv_timestamp_ms millis) { + return MILLIS_TO_MICROS(millis); +} + +FORCE_INLINE +gdv_timestamp_ns castTIMESTAMP_ns_timestamp_ms(gdv_timestamp_ms millis) { + return MILLIS_TO_NANOS(millis); +} + +FORCE_INLINE +gdv_timestamp_sec castTIMESTAMP_sec_timestamp_us(gdv_timestamp_us micros) { + return MICROS_TO_SEC(micros); +} + +FORCE_INLINE +gdv_timestamp_ms castTIMESTAMP_ms_timestamp_us(gdv_timestamp_us micros) { + return MICROS_TO_MILLIS(micros); +} + +FORCE_INLINE +gdv_timestamp_ns castTIMESTAMP_ns_timestamp_us(gdv_timestamp_us micros) { + return MICROS_TO_NANOS(micros); +} + +FORCE_INLINE +gdv_timestamp_sec 
castTIMESTAMP_sec_timestamp_ns(gdv_timestamp_ns nanos) { + return NANOS_TO_SEC(nanos); +} + +FORCE_INLINE +gdv_timestamp_ms castTIMESTAMP_ms_timestamp_ns(gdv_timestamp_ns nanos) { + return NANOS_TO_MILLIS(nanos); +} + +FORCE_INLINE +gdv_timestamp_us castTIMESTAMP_us_timestamp_ns(gdv_timestamp_ns nanos) { + return NANOS_TO_MICROS(nanos); +} + +// Cast timestamp to date64 for all precisions +FORCE_INLINE +gdv_date64 castDATE_timestamp_sec(gdv_timestamp_sec secs) { + EpochTimePoint tp(SECS_TO_MILLIS(secs)); + return tp.ClearTimeOfDay().MillisSinceEpoch(); +} + +FORCE_INLINE +gdv_date64 castDATE_timestamp_ms(gdv_timestamp_ms millis) { + EpochTimePoint tp(millis); + return tp.ClearTimeOfDay().MillisSinceEpoch(); +} + +FORCE_INLINE +gdv_date64 castDATE_timestamp_us(gdv_timestamp_us micros) { + EpochTimePoint tp(MICROS_TO_MILLIS(micros)); + return tp.ClearTimeOfDay().MillisSinceEpoch(); +} + +FORCE_INLINE +gdv_date64 castDATE_timestamp_ns(gdv_timestamp_ns nanos) { + EpochTimePoint tp(NANOS_TO_MILLIS(nanos)); + return tp.ClearTimeOfDay().MillisSinceEpoch(); +} + +// Cast timestamp to time32 for all precisions +FORCE_INLINE +gdv_time32 castTIME_timestamp_sec(gdv_timestamp_sec secs) { + EpochTimePoint tp(SECS_TO_MILLIS(secs)); + auto tp_at_midnight = tp.ClearTimeOfDay(); + return static_cast(tp.MillisSinceEpoch() - tp_at_midnight.MillisSinceEpoch()); +} + +FORCE_INLINE +gdv_time32 castTIME_timestamp_ms(gdv_timestamp_ms millis) { + EpochTimePoint tp(millis); + auto tp_at_midnight = tp.ClearTimeOfDay(); + return static_cast(tp.MillisSinceEpoch() - tp_at_midnight.MillisSinceEpoch()); +} + +FORCE_INLINE +gdv_time32 castTIME_timestamp_us(gdv_timestamp_us micros) { + EpochTimePoint tp(MICROS_TO_MILLIS(micros)); + auto tp_at_midnight = tp.ClearTimeOfDay(); + return static_cast(tp.MillisSinceEpoch() - tp_at_midnight.MillisSinceEpoch()); +} + +FORCE_INLINE +gdv_time32 castTIME_timestamp_ns(gdv_timestamp_ns nanos) { + EpochTimePoint tp(NANOS_TO_MILLIS(nanos)); + auto tp_at_midnight = 
tp.ClearTimeOfDay(); + return static_cast(tp.MillisSinceEpoch() - tp_at_midnight.MillisSinceEpoch()); +} + +// datediff for all timestamp precisions +FORCE_INLINE +gdv_int32 datediff_timestamp_sec_timestamp_sec(gdv_timestamp_sec start_secs, + gdv_timestamp_sec end_secs) { + return static_cast((start_secs - end_secs) / SECS_IN_DAY); +} + +FORCE_INLINE +gdv_int32 datediff_timestamp_ms_timestamp_ms(gdv_timestamp_ms start_ms, + gdv_timestamp_ms end_ms) { + return static_cast((start_ms - end_ms) / MILLIS_IN_DAY); +} + +FORCE_INLINE +gdv_int32 datediff_timestamp_us_timestamp_us(gdv_timestamp_us start_us, + gdv_timestamp_us end_us) { + return static_cast((start_us - end_us) / MICROS_IN_DAY); +} + +FORCE_INLINE +gdv_int32 datediff_timestamp_ns_timestamp_ns(gdv_timestamp_ns start_ns, + gdv_timestamp_ns end_ns) { + return static_cast((start_ns - end_ns) / NANOS_IN_DAY); +} + } // extern "C" diff --git a/cpp/src/gandiva/precompiled/time_constants.h b/cpp/src/gandiva/precompiled/time_constants.h index 015ef4bf9f7d..fa6f7bb1155c 100644 --- a/cpp/src/gandiva/precompiled/time_constants.h +++ b/cpp/src/gandiva/precompiled/time_constants.h @@ -17,14 +17,62 @@ #pragma once -#define MILLIS_IN_SEC (1000) -#define MILLIS_IN_MIN (60 * MILLIS_IN_SEC) -#define MILLIS_IN_HOUR (60 * MILLIS_IN_MIN) -#define MILLIS_IN_DAY (24 * MILLIS_IN_HOUR) -#define MILLIS_IN_WEEK (7 * MILLIS_IN_DAY) +// Millisecond-based constants (existing) +#define MILLIS_IN_SEC (1000LL) +#define MILLIS_IN_MIN (60LL * MILLIS_IN_SEC) +#define MILLIS_IN_HOUR (60LL * MILLIS_IN_MIN) +#define MILLIS_IN_DAY (24LL * MILLIS_IN_HOUR) +#define MILLIS_IN_WEEK (7LL * MILLIS_IN_DAY) +// Microsecond-based constants +#define MICROS_IN_MILLI (1000LL) +#define MICROS_IN_SEC (1000000LL) +#define MICROS_IN_MIN (60LL * MICROS_IN_SEC) +#define MICROS_IN_HOUR (60LL * MICROS_IN_MIN) +#define MICROS_IN_DAY (24LL * MICROS_IN_HOUR) +#define MICROS_IN_WEEK (7LL * MICROS_IN_DAY) + +// Nanosecond-based constants +#define NANOS_IN_MICRO (1000LL) 
+#define NANOS_IN_MILLI (1000000LL) +#define NANOS_IN_SEC (1000000000LL) +#define NANOS_IN_MIN (60LL * NANOS_IN_SEC) +#define NANOS_IN_HOUR (60LL * NANOS_IN_MIN) +#define NANOS_IN_DAY (24LL * NANOS_IN_HOUR) +#define NANOS_IN_WEEK (7LL * NANOS_IN_DAY) + +// Seconds-based constants +#define SECS_IN_MIN (60LL) +#define SECS_IN_HOUR (60LL * SECS_IN_MIN) +#define SECS_IN_DAY (24LL * SECS_IN_HOUR) +#define SECS_IN_WEEK (7LL * SECS_IN_DAY) + +// Millisecond conversion macros (existing) #define MILLIS_TO_SEC(millis) ((millis) / MILLIS_IN_SEC) #define MILLIS_TO_MINS(millis) ((millis) / MILLIS_IN_MIN) #define MILLIS_TO_HOUR(millis) ((millis) / MILLIS_IN_HOUR) #define MILLIS_TO_DAY(millis) ((millis) / MILLIS_IN_DAY) #define MILLIS_TO_WEEK(millis) ((millis) / MILLIS_IN_WEEK) + +// Microsecond conversion macros +#define MICROS_TO_SEC(micros) ((micros) / MICROS_IN_SEC) +#define MICROS_TO_MINS(micros) ((micros) / MICROS_IN_MIN) +#define MICROS_TO_HOUR(micros) ((micros) / MICROS_IN_HOUR) +#define MICROS_TO_DAY(micros) ((micros) / MICROS_IN_DAY) +#define MICROS_TO_MILLIS(micros) ((micros) / MICROS_IN_MILLI) + +// Nanosecond conversion macros +#define NANOS_TO_SEC(nanos) ((nanos) / NANOS_IN_SEC) +#define NANOS_TO_MINS(nanos) ((nanos) / NANOS_IN_MIN) +#define NANOS_TO_HOUR(nanos) ((nanos) / NANOS_IN_HOUR) +#define NANOS_TO_DAY(nanos) ((nanos) / NANOS_IN_DAY) +#define NANOS_TO_MILLIS(nanos) ((nanos) / NANOS_IN_MILLI) +#define NANOS_TO_MICROS(nanos) ((nanos) / NANOS_IN_MICRO) + +// Upward conversion macros +#define SECS_TO_MILLIS(secs) ((secs) * MILLIS_IN_SEC) +#define SECS_TO_MICROS(secs) ((secs) * MICROS_IN_SEC) +#define SECS_TO_NANOS(secs) ((secs) * NANOS_IN_SEC) +#define MILLIS_TO_MICROS(millis) ((millis) * MICROS_IN_MILLI) +#define MILLIS_TO_NANOS(millis) ((millis) * NANOS_IN_MILLI) +#define MICROS_TO_NANOS(micros) ((micros) * NANOS_IN_MICRO) diff --git a/cpp/src/gandiva/precompiled/time_test.cc b/cpp/src/gandiva/precompiled/time_test.cc index bdaf3dc2a5fa..ecb4150b39b5 100644 --- 
a/cpp/src/gandiva/precompiled/time_test.cc +++ b/cpp/src/gandiva/precompiled/time_test.cc @@ -327,6 +327,133 @@ TEST(TestTime, TestExtractTimestamp) { EXPECT_EQ(extractSecond_timestamp(ts), 33); } +TEST(TestTime, TestExtractYearTimestampPrecisions) { + // Test date: 2023-06-15 14:30:45.123456789 + // Unix epoch seconds: 1686839445 + constexpr int64_t epoch_sec = 1686839445LL; + + // Test extractYear with seconds precision + EXPECT_EQ(extractYear_timestamp_sec(epoch_sec), 2023); + + // Test extractYear with milliseconds precision + EXPECT_EQ(extractYear_timestamp_ms(epoch_sec * 1000 + 123), 2023); + + // Test extractYear with microseconds precision + EXPECT_EQ(extractYear_timestamp_us(epoch_sec * 1000000 + 123456), 2023); + + // Test extractYear with nanoseconds precision + EXPECT_EQ(extractYear_timestamp_ns(epoch_sec * 1000000000LL + 123456789), 2023); + + // Test year boundaries + // 1999-12-31 23:59:59 UTC -> should be 1999 + constexpr int64_t dec31_1999_sec = 946684799LL; + EXPECT_EQ(extractYear_timestamp_sec(dec31_1999_sec), 1999); + + // 2000-01-01 00:00:00 UTC -> should be 2000 + constexpr int64_t jan1_2000_sec = 946684800LL; + EXPECT_EQ(extractYear_timestamp_sec(jan1_2000_sec), 2000); + + // Test with different precisions for the same moment + EXPECT_EQ(extractYear_timestamp_ms(jan1_2000_sec * 1000), 2000); + EXPECT_EQ(extractYear_timestamp_us(jan1_2000_sec * 1000000), 2000); + EXPECT_EQ(extractYear_timestamp_ns(jan1_2000_sec * 1000000000LL), 2000); + + // Test negative timestamps (before 1970) + // 1960-06-15 00:00:00 UTC + constexpr int64_t jun15_1960_sec = -301276800LL; + EXPECT_EQ(extractYear_timestamp_sec(jun15_1960_sec), 1960); + EXPECT_EQ(extractYear_timestamp_ms(jun15_1960_sec * 1000), 1960); + EXPECT_EQ(extractYear_timestamp_us(jun15_1960_sec * 1000000), 1960); + EXPECT_EQ(extractYear_timestamp_ns(jun15_1960_sec * 1000000000LL), 1960); +} + +TEST(TestTime, TestExtractAllFunctionsTimestampPrecisions) { + // Test date: 2023-06-15 14:30:45 (Thursday, 
week 24, day 166) + // Unix epoch seconds: 1686839445 + constexpr int64_t epoch_sec = 1686839445LL; + constexpr int64_t epoch_ms = epoch_sec * 1000 + 123; + constexpr int64_t epoch_us = epoch_sec * 1000000 + 123456; + constexpr int64_t epoch_ns = epoch_sec * 1000000000LL + 123456789; + + // Test extractMonth (June = 6) + EXPECT_EQ(extractMonth_timestamp_sec(epoch_sec), 6); + EXPECT_EQ(extractMonth_timestamp_ms(epoch_ms), 6); + EXPECT_EQ(extractMonth_timestamp_us(epoch_us), 6); + EXPECT_EQ(extractMonth_timestamp_ns(epoch_ns), 6); + + // Test extractDay (15) + EXPECT_EQ(extractDay_timestamp_sec(epoch_sec), 15); + EXPECT_EQ(extractDay_timestamp_ms(epoch_ms), 15); + EXPECT_EQ(extractDay_timestamp_us(epoch_us), 15); + EXPECT_EQ(extractDay_timestamp_ns(epoch_ns), 15); + + // Test extractHour (14) + EXPECT_EQ(extractHour_timestamp_sec(epoch_sec), 14); + EXPECT_EQ(extractHour_timestamp_ms(epoch_ms), 14); + EXPECT_EQ(extractHour_timestamp_us(epoch_us), 14); + EXPECT_EQ(extractHour_timestamp_ns(epoch_ns), 14); + + // Test extractMinute (30) + EXPECT_EQ(extractMinute_timestamp_sec(epoch_sec), 30); + EXPECT_EQ(extractMinute_timestamp_ms(epoch_ms), 30); + EXPECT_EQ(extractMinute_timestamp_us(epoch_us), 30); + EXPECT_EQ(extractMinute_timestamp_ns(epoch_ns), 30); + + // Test extractSecond (45) + EXPECT_EQ(extractSecond_timestamp_sec(epoch_sec), 45); + EXPECT_EQ(extractSecond_timestamp_ms(epoch_ms), 45); + EXPECT_EQ(extractSecond_timestamp_us(epoch_us), 45); + EXPECT_EQ(extractSecond_timestamp_ns(epoch_ns), 45); + + // Test extractDoy (June 15 = day 166) + EXPECT_EQ(extractDoy_timestamp_sec(epoch_sec), 166); + EXPECT_EQ(extractDoy_timestamp_ms(epoch_ms), 166); + EXPECT_EQ(extractDoy_timestamp_us(epoch_us), 166); + EXPECT_EQ(extractDoy_timestamp_ns(epoch_ns), 166); + + // Test extractDow (Thursday = 5 in 1-indexed Sun=1 format) + EXPECT_EQ(extractDow_timestamp_sec(epoch_sec), 5); + EXPECT_EQ(extractDow_timestamp_ms(epoch_ms), 5); + EXPECT_EQ(extractDow_timestamp_us(epoch_us), 5); + 
EXPECT_EQ(extractDow_timestamp_ns(epoch_ns), 5); + + // Test extractQuarter (June = Q2) + EXPECT_EQ(extractQuarter_timestamp_sec(epoch_sec), 2); + EXPECT_EQ(extractQuarter_timestamp_ms(epoch_ms), 2); + EXPECT_EQ(extractQuarter_timestamp_us(epoch_us), 2); + EXPECT_EQ(extractQuarter_timestamp_ns(epoch_ns), 2); + + // Test extractWeek (ISO week 24) + EXPECT_EQ(extractWeek_timestamp_sec(epoch_sec), 24); + EXPECT_EQ(extractWeek_timestamp_ms(epoch_ms), 24); + EXPECT_EQ(extractWeek_timestamp_us(epoch_us), 24); + EXPECT_EQ(extractWeek_timestamp_ns(epoch_ns), 24); + + // Test extractEpoch (returns seconds) + EXPECT_EQ(extractEpoch_timestamp_sec(epoch_sec), epoch_sec); + EXPECT_EQ(extractEpoch_timestamp_ms(epoch_ms), epoch_sec); + EXPECT_EQ(extractEpoch_timestamp_us(epoch_us), epoch_sec); + EXPECT_EQ(extractEpoch_timestamp_ns(epoch_ns), epoch_sec); + + // Test extractMillennium (2023 = 3rd millennium) + EXPECT_EQ(extractMillennium_timestamp_sec(epoch_sec), 3); + EXPECT_EQ(extractMillennium_timestamp_ms(epoch_ms), 3); + EXPECT_EQ(extractMillennium_timestamp_us(epoch_us), 3); + EXPECT_EQ(extractMillennium_timestamp_ns(epoch_ns), 3); + + // Test extractCentury (2023 = 21st century) + EXPECT_EQ(extractCentury_timestamp_sec(epoch_sec), 21); + EXPECT_EQ(extractCentury_timestamp_ms(epoch_ms), 21); + EXPECT_EQ(extractCentury_timestamp_us(epoch_us), 21); + EXPECT_EQ(extractCentury_timestamp_ns(epoch_ns), 21); + + // Test extractDecade (2023 / 10 = 202) + EXPECT_EQ(extractDecade_timestamp_sec(epoch_sec), 202); + EXPECT_EQ(extractDecade_timestamp_ms(epoch_ms), 202); + EXPECT_EQ(extractDecade_timestamp_us(epoch_us), 202); + EXPECT_EQ(extractDecade_timestamp_ns(epoch_ns), 202); +} + TEST(TestTime, TimeStampTrunc) { EXPECT_EQ(date_trunc_Second_date64(StringToTimestamp("2015-05-05 10:20:34")), StringToTimestamp("2015-05-05 10:20:34")); @@ -388,6 +515,59 @@ TEST(TestTime, TimeStampTrunc) { StringToTimestamp("2000-03-06 00:00:00")); } +TEST(TestTime, TestDateTruncTimestampPrecisions) { + // 
Test date: 2023-06-15 14:30:45 + // Unix epoch seconds: 1686839445 + constexpr int64_t epoch_sec = 1686839445LL; + constexpr int64_t epoch_ms = epoch_sec * 1000 + 123; + constexpr int64_t epoch_us = epoch_sec * 1000000 + 123456; + constexpr int64_t epoch_ns = epoch_sec * 1000000000LL + 123456789; + + // Expected truncated values + // 2023-06-15 14:30:45 -> second truncation keeps same value + constexpr int64_t trunc_sec_sec = 1686839445LL; + constexpr int64_t trunc_sec_ms = trunc_sec_sec * 1000; + constexpr int64_t trunc_sec_us = trunc_sec_sec * 1000000; + constexpr int64_t trunc_sec_ns = trunc_sec_sec * 1000000000LL; + + EXPECT_EQ(date_trunc_Second_timestamp_sec(epoch_sec), trunc_sec_sec); + EXPECT_EQ(date_trunc_Second_timestamp_ms(epoch_ms), trunc_sec_ms); + EXPECT_EQ(date_trunc_Second_timestamp_us(epoch_us), trunc_sec_us); + EXPECT_EQ(date_trunc_Second_timestamp_ns(epoch_ns), trunc_sec_ns); + + // 2023-06-15 14:30:00 + constexpr int64_t trunc_min_sec = 1686839400LL; + EXPECT_EQ(date_trunc_Minute_timestamp_sec(epoch_sec), trunc_min_sec); + EXPECT_EQ(date_trunc_Minute_timestamp_ms(epoch_ms), trunc_min_sec * 1000); + EXPECT_EQ(date_trunc_Minute_timestamp_us(epoch_us), trunc_min_sec * 1000000); + EXPECT_EQ(date_trunc_Minute_timestamp_ns(epoch_ns), trunc_min_sec * 1000000000LL); + + // 2023-06-15 14:00:00 + constexpr int64_t trunc_hour_sec = 1686837600LL; + EXPECT_EQ(date_trunc_Hour_timestamp_sec(epoch_sec), trunc_hour_sec); + EXPECT_EQ(date_trunc_Hour_timestamp_ms(epoch_ms), trunc_hour_sec * 1000); + EXPECT_EQ(date_trunc_Hour_timestamp_us(epoch_us), trunc_hour_sec * 1000000); + EXPECT_EQ(date_trunc_Hour_timestamp_ns(epoch_ns), trunc_hour_sec * 1000000000LL); + + // 2023-06-15 00:00:00 + constexpr int64_t trunc_day_sec = 1686787200LL; + EXPECT_EQ(date_trunc_Day_timestamp_sec(epoch_sec), trunc_day_sec); + EXPECT_EQ(date_trunc_Day_timestamp_ms(epoch_ms), trunc_day_sec * 1000); + EXPECT_EQ(date_trunc_Day_timestamp_us(epoch_us), trunc_day_sec * 1000000); + 
EXPECT_EQ(date_trunc_Day_timestamp_ns(epoch_ns), trunc_day_sec * 1000000000LL); + + // Test sub-millisecond truncation + // Truncate to millisecond boundary (us precision) + EXPECT_EQ(date_trunc_Millisecond_timestamp_us(epoch_us), + (epoch_sec * 1000 + 123) * 1000); // 123456 -> 123000 micros + // Truncate to millisecond boundary (ns precision) + EXPECT_EQ(date_trunc_Millisecond_timestamp_ns(epoch_ns), + (epoch_sec * 1000 + 123) * 1000000); // 123456789 -> 123000000 nanos + // Truncate to microsecond boundary (ns precision) + EXPECT_EQ(date_trunc_Microsecond_timestamp_ns(epoch_ns), + (epoch_sec * 1000000 + 123456) * 1000); // 123456789 -> 123456000 nanos +} + TEST(TestTime, TimeStampAdd) { EXPECT_EQ( timestampaddSecond_int32_timestamp(30, StringToTimestamp("2000-05-01 10:20:34")), @@ -499,6 +679,53 @@ TEST(TestTime, TimeStampAdd) { StringToTimestamp("1999-03-01 00:00:00")); } +TEST(TestTime, TestTimestampAddPrecisions) { + // Base timestamp: 2023-06-15 14:30:45 + // Unix epoch seconds: 1686839445 + constexpr int64_t base_sec = 1686839445LL; + constexpr int64_t base_ms = base_sec * 1000 + 123; + constexpr int64_t base_us = base_sec * 1000000 + 123456; + constexpr int64_t base_ns = base_sec * 1000000000LL + 123456789; + + // Add 30 seconds + EXPECT_EQ(timestampaddSecond_int32_timestamp_sec(30, base_sec), base_sec + 30); + EXPECT_EQ(timestampaddSecond_int32_timestamp_ms(30, base_ms), base_ms + 30 * MILLIS_IN_SEC); + EXPECT_EQ(timestampaddSecond_int32_timestamp_us(30, base_us), base_us + 30 * MICROS_IN_SEC); + EXPECT_EQ(timestampaddSecond_int32_timestamp_ns(30, base_ns), base_ns + 30 * NANOS_IN_SEC); + + // Reverse argument order + EXPECT_EQ(timestampaddSecond_timestamp_sec_int32(base_sec, 30), base_sec + 30); + EXPECT_EQ(timestampaddSecond_timestamp_ms_int32(base_ms, 30), base_ms + 30 * MILLIS_IN_SEC); + + // Add 5 minutes + EXPECT_EQ(timestampaddMinute_int64_timestamp_sec(5, base_sec), base_sec + 5 * SECS_IN_MIN); + EXPECT_EQ(timestampaddMinute_int64_timestamp_us(5, 
base_us), base_us + 5 * MICROS_IN_MIN); + + // Add 2 hours + EXPECT_EQ(timestampaddHour_int32_timestamp_ns(2, base_ns), base_ns + 2 * NANOS_IN_HOUR); + + // Add 1 day + EXPECT_EQ(timestampaddDay_int32_timestamp_sec(1, base_sec), base_sec + SECS_IN_DAY); + EXPECT_EQ(timestampaddDay_int32_timestamp_ns(1, base_ns), base_ns + NANOS_IN_DAY); + + // Add 1 week + EXPECT_EQ(timestampaddWeek_int64_timestamp_us(1, base_us), base_us + MICROS_IN_WEEK); + + // Add 1 month - uses calendar logic + // June 15 + 1 month = July 15 + // July 15 2023 00:00:00 = 1689379200 seconds + constexpr int64_t jul15_sec = 1689379200LL + 14 * 3600 + 30 * 60 + 45; // same time + EXPECT_EQ(timestampaddMonth_int32_timestamp_sec(1, base_sec), jul15_sec); + + // Add 1 quarter (3 months) - June 15 + 3 months = September 15 + // September 15 2023 00:00:00 UTC = 1694736000 seconds + constexpr int64_t sep15_sec = 1694736000LL + 14 * 3600 + 30 * 60 + 45; + EXPECT_EQ(timestampaddQuarter_int32_timestamp_sec(1, base_sec), sep15_sec); + + // Negative values + EXPECT_EQ(timestampaddSecond_int32_timestamp_sec(-30, base_sec), base_sec - 30); + EXPECT_EQ(timestampaddDay_int64_timestamp_ns(-1, base_ns), base_ns - NANOS_IN_DAY); +} + // test cases from http://www.staff.science.uu.nl/~gent0113/calendar/isocalendar.htm TEST(TestTime, TestExtractWeek) { std::vector data; @@ -1117,4 +1344,87 @@ TEST(TestTime, TestCastNullableInterval) { context.Reset(); } +TEST(TestTime, TestCastTimestampBetweenPrecisions) { + // Base timestamp: 2023-06-15 14:30:45 + constexpr int64_t epoch_sec = 1686839445LL; + constexpr int64_t epoch_ms = epoch_sec * 1000 + 123; + constexpr int64_t epoch_us = epoch_sec * 1000000 + 123456; + constexpr int64_t epoch_ns = epoch_sec * 1000000000LL + 123456789; + + // Cast from seconds to other precisions (upscale) + EXPECT_EQ(castTIMESTAMP_ms_timestamp_sec(epoch_sec), epoch_sec * 1000); + EXPECT_EQ(castTIMESTAMP_us_timestamp_sec(epoch_sec), epoch_sec * 1000000); + EXPECT_EQ(castTIMESTAMP_ns_timestamp_sec(epoch_sec), epoch_sec * 1000000000LL); + + // Cast from 
milliseconds to other precisions + EXPECT_EQ(castTIMESTAMP_sec_timestamp_ms(epoch_ms), epoch_sec); // truncates + EXPECT_EQ(castTIMESTAMP_us_timestamp_ms(epoch_ms), epoch_ms * 1000); + EXPECT_EQ(castTIMESTAMP_ns_timestamp_ms(epoch_ms), epoch_ms * 1000000); + + // Cast from microseconds to other precisions + EXPECT_EQ(castTIMESTAMP_sec_timestamp_us(epoch_us), epoch_sec); // truncates + EXPECT_EQ(castTIMESTAMP_ms_timestamp_us(epoch_us), epoch_sec * 1000 + 123); // truncates + EXPECT_EQ(castTIMESTAMP_ns_timestamp_us(epoch_us), epoch_us * 1000); + + // Cast from nanoseconds to other precisions + EXPECT_EQ(castTIMESTAMP_sec_timestamp_ns(epoch_ns), epoch_sec); // truncates + EXPECT_EQ(castTIMESTAMP_ms_timestamp_ns(epoch_ns), epoch_sec * 1000 + 123); // truncates + EXPECT_EQ(castTIMESTAMP_us_timestamp_ns(epoch_ns), epoch_sec * 1000000 + 123456); // truncates +} + +TEST(TestTime, TestCastDateTimestampPrecisions) { + // 2023-06-15 14:30:45 -> should cast to 2023-06-15 00:00:00 + constexpr int64_t epoch_sec = 1686839445LL; + constexpr int64_t epoch_ms = epoch_sec * 1000 + 123; + constexpr int64_t epoch_us = epoch_sec * 1000000 + 123456; + constexpr int64_t epoch_ns = epoch_sec * 1000000000LL + 123456789; + + // 2023-06-15 00:00:00 in milliseconds + constexpr int64_t date_ms = 1686787200LL * 1000; + + EXPECT_EQ(castDATE_timestamp_sec(epoch_sec), date_ms); + EXPECT_EQ(castDATE_timestamp_ms(epoch_ms), date_ms); + EXPECT_EQ(castDATE_timestamp_us(epoch_us), date_ms); + EXPECT_EQ(castDATE_timestamp_ns(epoch_ns), date_ms); +} + +TEST(TestTime, TestCastTimeTimestampPrecisions) { + // 2023-06-15 14:30:45.123 -> should return 14:30:45.123 as millis since midnight + constexpr int64_t epoch_sec = 1686839445LL; + constexpr int64_t epoch_ms = epoch_sec * 1000 + 123; + constexpr int64_t epoch_us = epoch_sec * 1000000 + 123456; + constexpr int64_t epoch_ns = epoch_sec * 1000000000LL + 123456789; + + // 14:30:45.123 in milliseconds since midnight + constexpr int32_t time_ms = 14 * 3600 * 
1000 + 30 * 60 * 1000 + 45 * 1000 + 123; + // For second precision, no subsecond + constexpr int32_t time_sec_only = 14 * 3600 * 1000 + 30 * 60 * 1000 + 45 * 1000; + + EXPECT_EQ(castTIME_timestamp_sec(epoch_sec), time_sec_only); + EXPECT_EQ(castTIME_timestamp_ms(epoch_ms), time_ms); + EXPECT_EQ(castTIME_timestamp_us(epoch_us), time_ms); // truncates to millis + EXPECT_EQ(castTIME_timestamp_ns(epoch_ns), time_ms); // truncates to millis +} + +TEST(TestTime, TestDatediffTimestampPrecisions) { + // June 15 and June 17 - 2 days apart + constexpr int64_t jun15_sec = 1686839445LL; + constexpr int64_t jun17_sec = jun15_sec + 2 * SECS_IN_DAY; + + EXPECT_EQ(datediff_timestamp_sec_timestamp_sec(jun15_sec, jun17_sec), -2); + EXPECT_EQ(datediff_timestamp_sec_timestamp_sec(jun17_sec, jun15_sec), 2); + + constexpr int64_t jun15_ms = jun15_sec * 1000; + constexpr int64_t jun17_ms = jun17_sec * 1000; + EXPECT_EQ(datediff_timestamp_ms_timestamp_ms(jun15_ms, jun17_ms), -2); + + constexpr int64_t jun15_us = jun15_sec * 1000000; + constexpr int64_t jun17_us = jun17_sec * 1000000; + EXPECT_EQ(datediff_timestamp_us_timestamp_us(jun15_us, jun17_us), -2); + + constexpr int64_t jun15_ns = jun15_sec * 1000000000LL; + constexpr int64_t jun17_ns = jun17_sec * 1000000000LL; + EXPECT_EQ(datediff_timestamp_ns_timestamp_ns(jun15_ns, jun17_ns), -2); +} + } // namespace gandiva diff --git a/cpp/src/gandiva/precompiled/timestamp_arithmetic.cc b/cpp/src/gandiva/precompiled/timestamp_arithmetic.cc index 695605b3cc77..884412c31c59 100644 --- a/cpp/src/gandiva/precompiled/timestamp_arithmetic.cc +++ b/cpp/src/gandiva/precompiled/timestamp_arithmetic.cc @@ -280,4 +280,132 @@ ADD_TIMESTAMP_TO_INT64_FIXED_UNITS(date64, add, MILLIS_IN_DAY) ADD_TIMESTAMP_TO_INT64_FIXED_UNITS(timestamp, date_add, MILLIS_IN_DAY) ADD_TIMESTAMP_TO_INT64_FIXED_UNITS(timestamp, add, MILLIS_IN_DAY) +// ============================================================================ +// Precision-aware timestamp arithmetic functions +// 
============================================================================ + +// Fixed unit addition for precision-specific timestamps +// SUFFIX: sec, ms, us, ns +// TYPE_ALIAS: gdv_timestamp_sec, gdv_timestamp_ms, etc. +// UNIT_VALUE: The multiplier for fixed units (e.g., 1 for sec, MICROS_IN_SEC for us) +#define ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(SUFFIX, TYPE_ALIAS, NAME, UNIT_VALUE) \ + FORCE_INLINE \ + TYPE_ALIAS NAME##_int32_timestamp_##SUFFIX(gdv_int32 count, TYPE_ALIAS value) { \ + return value + UNIT_VALUE * static_cast(count); \ + } \ + FORCE_INLINE \ + TYPE_ALIAS NAME##_timestamp_##SUFFIX##_int32(TYPE_ALIAS value, gdv_int32 count) { \ + return value + UNIT_VALUE * static_cast(count); \ + } + +#define ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(SUFFIX, TYPE_ALIAS, NAME, UNIT_VALUE) \ + FORCE_INLINE \ + TYPE_ALIAS NAME##_int64_timestamp_##SUFFIX(gdv_int64 count, TYPE_ALIAS value) { \ + return value + UNIT_VALUE * static_cast(count); \ + } \ + FORCE_INLINE \ + TYPE_ALIAS NAME##_timestamp_##SUFFIX##_int64(TYPE_ALIAS value, gdv_int64 count) { \ + return value + UNIT_VALUE * static_cast(count); \ + } + +// Month-based addition for precision-specific timestamps +// Needs to convert to/from milliseconds for EpochTimePoint calendar operations +#define ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(SUFFIX, TYPE_ALIAS, TP_TYPE, NAME, N_MONTHS, \ + TO_MILLIS, FROM_MILLIS) \ + FORCE_INLINE \ + TYPE_ALIAS NAME##_int32_timestamp_##SUFFIX(gdv_int32 count, TYPE_ALIAS value) { \ + EpochTimePoint tp(TO_MILLIS(value)); \ + return FROM_MILLIS(tp.AddMonths(static_cast(count * N_MONTHS)).MillisSinceEpoch()); \ + } \ + FORCE_INLINE \ + TYPE_ALIAS NAME##_timestamp_##SUFFIX##_int32(TYPE_ALIAS value, gdv_int32 count) { \ + EpochTimePoint tp(TO_MILLIS(value)); \ + return FROM_MILLIS(tp.AddMonths(static_cast(count * N_MONTHS)).MillisSinceEpoch()); \ + } + +#define ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(SUFFIX, TYPE_ALIAS, TP_TYPE, NAME, N_MONTHS, \ + TO_MILLIS, FROM_MILLIS) \ + 
FORCE_INLINE \ + TYPE_ALIAS NAME##_int64_timestamp_##SUFFIX(gdv_int64 count, TYPE_ALIAS value) { \ + EpochTimePoint tp(TO_MILLIS(value)); \ + return FROM_MILLIS(tp.AddMonths(static_cast(count * N_MONTHS)).MillisSinceEpoch()); \ + } \ + FORCE_INLINE \ + TYPE_ALIAS NAME##_timestamp_##SUFFIX##_int64(TYPE_ALIAS value, gdv_int64 count) { \ + EpochTimePoint tp(TO_MILLIS(value)); \ + return FROM_MILLIS(tp.AddMonths(static_cast(count * N_MONTHS)).MillisSinceEpoch()); \ + } + +// timestamp[s] - seconds precision +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(sec, gdv_timestamp_sec, timestampaddSecond, 1) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(sec, gdv_timestamp_sec, timestampaddMinute, SECS_IN_MIN) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(sec, gdv_timestamp_sec, timestampaddHour, SECS_IN_HOUR) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(sec, gdv_timestamp_sec, timestampaddDay, SECS_IN_DAY) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(sec, gdv_timestamp_sec, timestampaddWeek, SECS_IN_WEEK) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(sec, gdv_timestamp_sec, timestampaddSecond, 1) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(sec, gdv_timestamp_sec, timestampaddMinute, SECS_IN_MIN) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(sec, gdv_timestamp_sec, timestampaddHour, SECS_IN_HOUR) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(sec, gdv_timestamp_sec, timestampaddDay, SECS_IN_DAY) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(sec, gdv_timestamp_sec, timestampaddWeek, SECS_IN_WEEK) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(sec, gdv_timestamp_sec, EpochTimePointSec, timestampaddMonth, 1, SECS_TO_MILLIS, MILLIS_TO_SEC) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(sec, gdv_timestamp_sec, EpochTimePointSec, timestampaddQuarter, 3, SECS_TO_MILLIS, MILLIS_TO_SEC) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(sec, gdv_timestamp_sec, EpochTimePointSec, timestampaddYear, 12, SECS_TO_MILLIS, MILLIS_TO_SEC) +ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(sec, gdv_timestamp_sec, EpochTimePointSec, timestampaddMonth, 1, SECS_TO_MILLIS, MILLIS_TO_SEC) 
+ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(sec, gdv_timestamp_sec, EpochTimePointSec, timestampaddQuarter, 3, SECS_TO_MILLIS, MILLIS_TO_SEC) +ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(sec, gdv_timestamp_sec, EpochTimePointSec, timestampaddYear, 12, SECS_TO_MILLIS, MILLIS_TO_SEC) + +// timestamp[ms] - milliseconds precision (explicit naming) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(ms, gdv_timestamp_ms, timestampaddSecond, MILLIS_IN_SEC) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(ms, gdv_timestamp_ms, timestampaddMinute, MILLIS_IN_MIN) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(ms, gdv_timestamp_ms, timestampaddHour, MILLIS_IN_HOUR) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(ms, gdv_timestamp_ms, timestampaddDay, MILLIS_IN_DAY) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(ms, gdv_timestamp_ms, timestampaddWeek, MILLIS_IN_WEEK) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(ms, gdv_timestamp_ms, timestampaddSecond, MILLIS_IN_SEC) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(ms, gdv_timestamp_ms, timestampaddMinute, MILLIS_IN_MIN) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(ms, gdv_timestamp_ms, timestampaddHour, MILLIS_IN_HOUR) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(ms, gdv_timestamp_ms, timestampaddDay, MILLIS_IN_DAY) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(ms, gdv_timestamp_ms, timestampaddWeek, MILLIS_IN_WEEK) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(ms, gdv_timestamp_ms, EpochTimePointMilli, timestampaddMonth, 1, , ) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(ms, gdv_timestamp_ms, EpochTimePointMilli, timestampaddQuarter, 3, , ) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(ms, gdv_timestamp_ms, EpochTimePointMilli, timestampaddYear, 12, , ) +ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(ms, gdv_timestamp_ms, EpochTimePointMilli, timestampaddMonth, 1, , ) +ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(ms, gdv_timestamp_ms, EpochTimePointMilli, timestampaddQuarter, 3, , ) +ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(ms, gdv_timestamp_ms, EpochTimePointMilli, timestampaddYear, 12, , ) + +// timestamp[us] - microseconds precision 
+ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(us, gdv_timestamp_us, timestampaddSecond, MICROS_IN_SEC) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(us, gdv_timestamp_us, timestampaddMinute, MICROS_IN_MIN) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(us, gdv_timestamp_us, timestampaddHour, MICROS_IN_HOUR) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(us, gdv_timestamp_us, timestampaddDay, MICROS_IN_DAY) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(us, gdv_timestamp_us, timestampaddWeek, MICROS_IN_WEEK) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(us, gdv_timestamp_us, timestampaddSecond, MICROS_IN_SEC) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(us, gdv_timestamp_us, timestampaddMinute, MICROS_IN_MIN) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(us, gdv_timestamp_us, timestampaddHour, MICROS_IN_HOUR) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(us, gdv_timestamp_us, timestampaddDay, MICROS_IN_DAY) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(us, gdv_timestamp_us, timestampaddWeek, MICROS_IN_WEEK) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(us, gdv_timestamp_us, EpochTimePointMicro, timestampaddMonth, 1, MICROS_TO_MILLIS, MILLIS_TO_MICROS) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(us, gdv_timestamp_us, EpochTimePointMicro, timestampaddQuarter, 3, MICROS_TO_MILLIS, MILLIS_TO_MICROS) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(us, gdv_timestamp_us, EpochTimePointMicro, timestampaddYear, 12, MICROS_TO_MILLIS, MILLIS_TO_MICROS) +ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(us, gdv_timestamp_us, EpochTimePointMicro, timestampaddMonth, 1, MICROS_TO_MILLIS, MILLIS_TO_MICROS) +ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(us, gdv_timestamp_us, EpochTimePointMicro, timestampaddQuarter, 3, MICROS_TO_MILLIS, MILLIS_TO_MICROS) +ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(us, gdv_timestamp_us, EpochTimePointMicro, timestampaddYear, 12, MICROS_TO_MILLIS, MILLIS_TO_MICROS) + +// timestamp[ns] - nanoseconds precision +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(ns, gdv_timestamp_ns, timestampaddSecond, NANOS_IN_SEC) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(ns, 
gdv_timestamp_ns, timestampaddMinute, NANOS_IN_MIN) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(ns, gdv_timestamp_ns, timestampaddHour, NANOS_IN_HOUR) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(ns, gdv_timestamp_ns, timestampaddDay, NANOS_IN_DAY) +ADD_INT32_TO_TIMESTAMP_PRECISION_FIXED(ns, gdv_timestamp_ns, timestampaddWeek, NANOS_IN_WEEK) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(ns, gdv_timestamp_ns, timestampaddSecond, NANOS_IN_SEC) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(ns, gdv_timestamp_ns, timestampaddMinute, NANOS_IN_MIN) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(ns, gdv_timestamp_ns, timestampaddHour, NANOS_IN_HOUR) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(ns, gdv_timestamp_ns, timestampaddDay, NANOS_IN_DAY) +ADD_INT64_TO_TIMESTAMP_PRECISION_FIXED(ns, gdv_timestamp_ns, timestampaddWeek, NANOS_IN_WEEK) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(ns, gdv_timestamp_ns, EpochTimePointNano, timestampaddMonth, 1, NANOS_TO_MILLIS, MILLIS_TO_NANOS) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(ns, gdv_timestamp_ns, EpochTimePointNano, timestampaddQuarter, 3, NANOS_TO_MILLIS, MILLIS_TO_NANOS) +ADD_INT32_TO_TIMESTAMP_PRECISION_MONTH(ns, gdv_timestamp_ns, EpochTimePointNano, timestampaddYear, 12, NANOS_TO_MILLIS, MILLIS_TO_NANOS) +ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(ns, gdv_timestamp_ns, EpochTimePointNano, timestampaddMonth, 1, NANOS_TO_MILLIS, MILLIS_TO_NANOS) +ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(ns, gdv_timestamp_ns, EpochTimePointNano, timestampaddQuarter, 3, NANOS_TO_MILLIS, MILLIS_TO_NANOS) +ADD_INT64_TO_TIMESTAMP_PRECISION_MONTH(ns, gdv_timestamp_ns, EpochTimePointNano, timestampaddYear, 12, NANOS_TO_MILLIS, MILLIS_TO_NANOS) + } // extern "C" diff --git a/cpp/src/gandiva/precompiled/types.h b/cpp/src/gandiva/precompiled/types.h index c93b694fc777..e885205718f7 100644 --- a/cpp/src/gandiva/precompiled/types.h +++ b/cpp/src/gandiva/precompiled/types.h @@ -37,6 +37,14 @@ using gdv_date64 = int64_t; using gdv_date32 = int32_t; using gdv_time32 = int32_t; using gdv_timestamp = 
int64_t; + +// Precision-specific timestamp type aliases +// All are int64_t but document the expected unit for clarity +using gdv_timestamp_sec = int64_t; // seconds since epoch +using gdv_timestamp_ms = int64_t; // milliseconds since epoch +using gdv_timestamp_us = int64_t; // microseconds since epoch +using gdv_timestamp_ns = int64_t; // nanoseconds since epoch + using gdv_utf8 = char*; using gdv_binary = char*; using gdv_day_time_interval = int64_t; @@ -67,10 +75,36 @@ gdv_int64 extractDay_timestamp(gdv_timestamp millis); gdv_int64 extractHour_timestamp(gdv_timestamp millis); gdv_int64 extractMinute_timestamp(gdv_timestamp millis); gdv_int64 extractSecond_timestamp(gdv_timestamp millis); +gdv_int64 extractEpoch_timestamp(gdv_timestamp millis); gdv_int64 extractHour_time32(gdv_int32 millis_in_day); gdv_int64 extractMinute_time32(gdv_int32 millis_in_day); gdv_int64 extractSecond_time32(gdv_int32 millis_in_day); +// Precision-specific extract function declarations +// Macro to declare all precision variants for an extract function +#define DECLARE_EXTRACT_TIMESTAMP_PRECISION(NAME) \ + gdv_int64 NAME##_timestamp_sec(gdv_timestamp_sec secs); \ + gdv_int64 NAME##_timestamp_ms(gdv_timestamp_ms millis); \ + gdv_int64 NAME##_timestamp_us(gdv_timestamp_us micros); \ + gdv_int64 NAME##_timestamp_ns(gdv_timestamp_ns nanos); + +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractYear) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractMonth) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractDay) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractHour) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractMinute) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractSecond) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractDoy) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractDow) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractWeek) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractQuarter) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractEpoch) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractMillennium) 
+DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractCentury) +DECLARE_EXTRACT_TIMESTAMP_PRECISION(extractDecade) + +#undef DECLARE_EXTRACT_TIMESTAMP_PRECISION + gdv_int32 hash32(double val, gdv_int32 seed); gdv_int32 hash32_buf(const gdv_uint8* buf, int len, gdv_int32 seed); gdv_int64 hash64(double val, gdv_int64 seed); @@ -170,6 +204,31 @@ gdv_int64 date_sub_timestamp_int32(gdv_timestamp, gdv_int32); gdv_int64 subtract_timestamp_int32(gdv_timestamp, gdv_int32); gdv_int64 date_diff_timestamp_int64(gdv_timestamp, gdv_int64); +// Precision-specific timestampadd function declarations +#define DECLARE_TIMESTAMPADD_PRECISION(NAME, SUFFIX, TYPE_ALIAS) \ + TYPE_ALIAS NAME##_int32_timestamp_##SUFFIX(gdv_int32, TYPE_ALIAS); \ + TYPE_ALIAS NAME##_timestamp_##SUFFIX##_int32(TYPE_ALIAS, gdv_int32); \ + TYPE_ALIAS NAME##_int64_timestamp_##SUFFIX(gdv_int64, TYPE_ALIAS); \ + TYPE_ALIAS NAME##_timestamp_##SUFFIX##_int64(TYPE_ALIAS, gdv_int64); + +#define DECLARE_ALL_TIMESTAMPADD_PRECISION(NAME) \ + DECLARE_TIMESTAMPADD_PRECISION(NAME, sec, gdv_timestamp_sec) \ + DECLARE_TIMESTAMPADD_PRECISION(NAME, ms, gdv_timestamp_ms) \ + DECLARE_TIMESTAMPADD_PRECISION(NAME, us, gdv_timestamp_us) \ + DECLARE_TIMESTAMPADD_PRECISION(NAME, ns, gdv_timestamp_ns) + +DECLARE_ALL_TIMESTAMPADD_PRECISION(timestampaddSecond) +DECLARE_ALL_TIMESTAMPADD_PRECISION(timestampaddMinute) +DECLARE_ALL_TIMESTAMPADD_PRECISION(timestampaddHour) +DECLARE_ALL_TIMESTAMPADD_PRECISION(timestampaddDay) +DECLARE_ALL_TIMESTAMPADD_PRECISION(timestampaddWeek) +DECLARE_ALL_TIMESTAMPADD_PRECISION(timestampaddMonth) +DECLARE_ALL_TIMESTAMPADD_PRECISION(timestampaddQuarter) +DECLARE_ALL_TIMESTAMPADD_PRECISION(timestampaddYear) + +#undef DECLARE_TIMESTAMPADD_PRECISION +#undef DECLARE_ALL_TIMESTAMPADD_PRECISION + gdv_boolean castBIT_utf8(gdv_int64 context, const char* data, gdv_int32 data_len); bool is_distinct_from_timestamp_timestamp(gdv_int64, bool, gdv_int64, bool); @@ -189,6 +248,33 @@ gdv_int32 
datediff_timestamp_timestamp(gdv_timestamp start_millis, gdv_timestamp end_millis); gdv_int64 date_trunc_Week_timestamp(gdv_timestamp); + +// Precision-specific date_trunc declarations +#define DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(NAME) \ + gdv_timestamp_sec NAME##_timestamp_sec(gdv_timestamp_sec); \ + gdv_timestamp_ms NAME##_timestamp_ms(gdv_timestamp_ms); \ + gdv_timestamp_us NAME##_timestamp_us(gdv_timestamp_us); \ + gdv_timestamp_ns NAME##_timestamp_ns(gdv_timestamp_ns); + +DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(date_trunc_Second) +DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(date_trunc_Minute) +DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(date_trunc_Hour) +DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(date_trunc_Day) +DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(date_trunc_Week) +DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(date_trunc_Month) +DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(date_trunc_Quarter) +DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(date_trunc_Year) +DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(date_trunc_Decade) +DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(date_trunc_Century) +DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION(date_trunc_Millennium) + +#undef DECLARE_DATE_TRUNC_TIMESTAMP_PRECISION + +// New sub-millisecond truncation functions +gdv_timestamp_us date_trunc_Millisecond_timestamp_us(gdv_timestamp_us); +gdv_timestamp_ns date_trunc_Millisecond_timestamp_ns(gdv_timestamp_ns); +gdv_timestamp_ns date_trunc_Microsecond_timestamp_ns(gdv_timestamp_ns); + double months_between_timestamp_timestamp(gdv_uint64, gdv_uint64); gdv_int32 mem_compare(const char* left, gdv_int32 left_len, const char* right, @@ -461,6 +547,51 @@ gdv_date64 next_day_from_timestamp(gdv_int64 context, gdv_date64 millis, const c gdv_int64 truncate_int64_int32(gdv_int64 in, gdv_int32 out_scale); +// Precision-specific timestamp cast function declarations +// Cast between timestamp precisions +gdv_timestamp_ms castTIMESTAMP_ms_timestamp_sec(gdv_timestamp_sec); +gdv_timestamp_us castTIMESTAMP_us_timestamp_sec(gdv_timestamp_sec); 
+gdv_timestamp_ns castTIMESTAMP_ns_timestamp_sec(gdv_timestamp_sec); +gdv_timestamp_sec castTIMESTAMP_sec_timestamp_ms(gdv_timestamp_ms); +gdv_timestamp_us castTIMESTAMP_us_timestamp_ms(gdv_timestamp_ms); +gdv_timestamp_ns castTIMESTAMP_ns_timestamp_ms(gdv_timestamp_ms); +gdv_timestamp_sec castTIMESTAMP_sec_timestamp_us(gdv_timestamp_us); +gdv_timestamp_ms castTIMESTAMP_ms_timestamp_us(gdv_timestamp_us); +gdv_timestamp_ns castTIMESTAMP_ns_timestamp_us(gdv_timestamp_us); +gdv_timestamp_sec castTIMESTAMP_sec_timestamp_ns(gdv_timestamp_ns); +gdv_timestamp_ms castTIMESTAMP_ms_timestamp_ns(gdv_timestamp_ns); +gdv_timestamp_us castTIMESTAMP_us_timestamp_ns(gdv_timestamp_ns); + +// Cast timestamp to date64 for all precisions +gdv_date64 castDATE_timestamp_sec(gdv_timestamp_sec); +gdv_date64 castDATE_timestamp_ms(gdv_timestamp_ms); +gdv_date64 castDATE_timestamp_us(gdv_timestamp_us); +gdv_date64 castDATE_timestamp_ns(gdv_timestamp_ns); + +// Cast timestamp to time32 for all precisions +gdv_time32 castTIME_timestamp_sec(gdv_timestamp_sec); +gdv_time32 castTIME_timestamp_ms(gdv_timestamp_ms); +gdv_time32 castTIME_timestamp_us(gdv_timestamp_us); +gdv_time32 castTIME_timestamp_ns(gdv_timestamp_ns); + +// datediff for all precisions +gdv_int32 datediff_timestamp_sec_timestamp_sec(gdv_timestamp_sec, gdv_timestamp_sec); +gdv_int32 datediff_timestamp_ms_timestamp_ms(gdv_timestamp_ms, gdv_timestamp_ms); +gdv_int32 datediff_timestamp_us_timestamp_us(gdv_timestamp_us, gdv_timestamp_us); +gdv_int32 datediff_timestamp_ns_timestamp_ns(gdv_timestamp_ns, gdv_timestamp_ns); + +// months_between for all precisions +double months_between_timestamp_sec_timestamp_sec(gdv_uint64, gdv_uint64); +double months_between_timestamp_ms_timestamp_ms(gdv_uint64, gdv_uint64); +double months_between_timestamp_us_timestamp_us(gdv_uint64, gdv_uint64); +double months_between_timestamp_ns_timestamp_ns(gdv_uint64, gdv_uint64); + +// last_day for all precisions +gdv_date64 
last_day_timestamp_sec(gdv_timestamp_sec); +gdv_date64 last_day_timestamp_ms(gdv_timestamp_ms); +gdv_date64 last_day_timestamp_us(gdv_timestamp_us); +gdv_date64 last_day_timestamp_ns(gdv_timestamp_ns); + const char* repeat_utf8_int32(gdv_int64 context, const char* in, gdv_int32 in_len, gdv_int32 repeat_times, gdv_int32* out_len); diff --git a/cpp/src/gandiva/tests/date_time_test.cc b/cpp/src/gandiva/tests/date_time_test.cc index 6208f1ecba9b..1adeb52415db 100644 --- a/cpp/src/gandiva/tests/date_time_test.cc +++ b/cpp/src/gandiva/tests/date_time_test.cc @@ -828,4 +828,77 @@ TEST_F(DateTimeTestProjector, TestFromUtcTimestamp) { // Validate results EXPECT_ARROW_ARRAY_EQUALS(exp_output, outputs.at(0)); } + +TEST_F(DateTimeTestProjector, TestExtractYearTimestampPrecisions) { + // Test that extractYear works correctly with all 4 timestamp precisions + auto field_ts_sec = field("ts_sec", timestamp(arrow::TimeUnit::SECOND)); + auto field_ts_ms = field("ts_ms", timestamp(arrow::TimeUnit::MILLI)); + auto field_ts_us = field("ts_us", timestamp(arrow::TimeUnit::MICRO)); + auto field_ts_ns = field("ts_ns", timestamp(arrow::TimeUnit::NANO)); + auto schema = arrow::schema({field_ts_sec, field_ts_ms, field_ts_us, field_ts_ns}); + + // Output fields + auto field_year_sec = field("year_sec", int64()); + auto field_year_ms = field("year_ms", int64()); + auto field_year_us = field("year_us", int64()); + auto field_year_ns = field("year_ns", int64()); + + // Build expressions for each precision + auto expr_sec = + TreeExprBuilder::MakeExpression("extractYear", {field_ts_sec}, field_year_sec); + auto expr_ms = + TreeExprBuilder::MakeExpression("extractYear", {field_ts_ms}, field_year_ms); + auto expr_us = + TreeExprBuilder::MakeExpression("extractYear", {field_ts_us}, field_year_us); + auto expr_ns = + TreeExprBuilder::MakeExpression("extractYear", {field_ts_ns}, field_year_ns); + + // Build projector + std::shared_ptr projector; + auto status = Projector::Make(schema, {expr_sec, 
expr_ms, expr_us, expr_ns}, + TestConfiguration(), &projector); + ASSERT_OK(status); + + // Test data: 2023-06-15 14:30:45.123456789 + // Unix epoch seconds: 1686839445 + constexpr int64_t epoch_sec = 1686839445LL; + constexpr int num_records = 3; + + // Input arrays with different values to test year extraction + // 2023-06-15, 1999-12-31, 2000-01-01 + constexpr int64_t dec31_1999_sec = 946684799LL; + constexpr int64_t jan1_2000_sec = 946684800LL; + + auto array_sec = + MakeArrowArrayInt64({epoch_sec, dec31_1999_sec, jan1_2000_sec}, {true, true, true}); + auto array_ms = MakeArrowArrayInt64( + {epoch_sec * 1000 + 123, dec31_1999_sec * 1000, jan1_2000_sec * 1000}, + {true, true, true}); + auto array_us = MakeArrowArrayInt64( + {epoch_sec * 1000000 + 123456, dec31_1999_sec * 1000000, jan1_2000_sec * 1000000}, + {true, true, true}); + auto array_ns = MakeArrowArrayInt64({epoch_sec * 1000000000LL + 123456789, + dec31_1999_sec * 1000000000LL, + jan1_2000_sec * 1000000000LL}, + {true, true, true}); + + // Create record batch + auto in_batch = + arrow::RecordBatch::Make(schema, num_records, {array_sec, array_ms, array_us, array_ns}); + + // Evaluate + arrow::ArrayVector outputs; + status = projector->Evaluate(*in_batch, pool_, &outputs); + ASSERT_OK(status); + + // Expected results: 2023, 1999, 2000 for all precisions + auto exp_years = MakeArrowArrayInt64({2023, 1999, 2000}, {true, true, true}); + + // All precisions should give the same year values + EXPECT_ARROW_ARRAY_EQUALS(exp_years, outputs.at(0)); // seconds + EXPECT_ARROW_ARRAY_EQUALS(exp_years, outputs.at(1)); // milliseconds + EXPECT_ARROW_ARRAY_EQUALS(exp_years, outputs.at(2)); // microseconds + EXPECT_ARROW_ARRAY_EQUALS(exp_years, outputs.at(3)); // nanoseconds +} + } // namespace gandiva diff --git a/java/adapter/avro/target/classes/arrow-git.properties b/java/adapter/avro/target/classes/arrow-git.properties new file mode 100644 index 000000000000..3842a3c63bc1 --- /dev/null +++ 
b/java/adapter/avro/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:08 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/adapter/avro/target/test-classes/logback.xml b/java/adapter/avro/target/test-classes/logback.xml new file mode 100644 index 000000000000..4c54d18a210f --- /dev/null +++ b/java/adapter/avro/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + diff --git a/java/adapter/avro/target/test-classes/schema/attrs/test_enum_attrs.avsc b/java/adapter/avro/target/test-classes/schema/attrs/test_enum_attrs.avsc new file mode 100644 index 000000000000..afd00b8d9f7d --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/attrs/test_enum_attrs.avsc @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "type": "enum", + "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"], + "name": "testEnum", + "doc" : "enum doc", + "aliases" : ["alias1", "alias2"] +} diff --git a/java/adapter/avro/target/test-classes/schema/attrs/test_fixed_attr.avsc b/java/adapter/avro/target/test-classes/schema/attrs/test_fixed_attr.avsc new file mode 100644 index 000000000000..55e504def175 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/attrs/test_fixed_attr.avsc @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "type": "fixed", + "size": 6, + "name": "testFixed", + "doc" : "fixed doc", + "aliases" : ["alias1", "alias2"] +} diff --git a/java/adapter/avro/target/test-classes/schema/attrs/test_record_attrs.avsc b/java/adapter/avro/target/test-classes/schema/attrs/test_record_attrs.avsc new file mode 100644 index 000000000000..2e2e311a9d54 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/attrs/test_record_attrs.avsc @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testAttrs", + "fields": [ + { + "name" : "f0", + "type" : { + "type" : "record", + "name" : "nestedInRecord", + "doc" : "f0 doc", + "aliases" : ["f0.a1"], + "fields": [ + {"name": "f1", "type": "string", "doc": "f1 doc", "aliases" : ["f1.a1", "f1.a2"]}, + {"name": "f2", "type": "int", "doc": "f2 doc", "aliases" : ["f2.a1", "f2.a2"]} + ] + } + } + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/logical/test_date.avsc b/java/adapter/avro/target/test-classes/schema/logical/test_date.avsc new file mode 100644 index 000000000000..f661e65062db --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/logical/test_date.avsc @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "name": "test", + "type": "int", + "logicalType" : "date" +} diff --git a/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid1.avsc b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid1.avsc new file mode 100644 index 000000000000..18d7d63fc733 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid1.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "name": "test", + "type": "bytes", + "logicalType" : "decimal", + "precision": 39, + "scale": 2 +} diff --git a/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid2.avsc b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid2.avsc new file mode 100644 index 000000000000..eed7bd7811d1 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid2.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "name": "test", + "type": "bytes", + "logicalType" : "decimal", + "precision": 20, + "scale": -1 +} diff --git a/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid3.avsc b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid3.avsc new file mode 100644 index 000000000000..1667b8aff87d --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid3.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "name": "test", + "type": "bytes", + "logicalType" : "decimal", + "precision": 20, + "scale": 40 +} diff --git a/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid4.avsc b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid4.avsc new file mode 100644 index 000000000000..e1f710416f91 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_invalid4.avsc @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "name": "test", + "type": "fixed", + "size" : 1, + "logicalType" : "decimal", + "precision": 30, + "scale": 2 +} diff --git a/java/adapter/avro/target/test-classes/schema/logical/test_decimal_with_original_bytes.avsc b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_with_original_bytes.avsc new file mode 100644 index 000000000000..944b5d85d6df --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_with_original_bytes.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "name": "test", + "type": "bytes", + "logicalType" : "decimal", + "precision": 10, + "scale": 2 +} diff --git a/java/adapter/avro/target/test-classes/schema/logical/test_decimal_with_original_fixed.avsc b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_with_original_fixed.avsc new file mode 100644 index 000000000000..1901f90a975f --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/logical/test_decimal_with_original_fixed.avsc @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "name": "test", + "type": "fixed", + "size" : 10, + "logicalType" : "decimal", + "precision": 10, + "scale": 2 +} diff --git a/java/adapter/avro/target/test-classes/schema/logical/test_time_micros.avsc b/java/adapter/avro/target/test-classes/schema/logical/test_time_micros.avsc new file mode 100644 index 000000000000..ee7d4e9378aa --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/logical/test_time_micros.avsc @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "name": "test", + "type": "long", + "logicalType" : "time-micros" +} diff --git a/java/adapter/avro/target/test-classes/schema/logical/test_time_millis.avsc b/java/adapter/avro/target/test-classes/schema/logical/test_time_millis.avsc new file mode 100644 index 000000000000..54877babc81b --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/logical/test_time_millis.avsc @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "name": "test", + "type": "int", + "logicalType" : "time-millis" +} diff --git a/java/adapter/avro/target/test-classes/schema/logical/test_timestamp_micros.avsc b/java/adapter/avro/target/test-classes/schema/logical/test_timestamp_micros.avsc new file mode 100644 index 000000000000..15c0bf53dca8 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/logical/test_timestamp_micros.avsc @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "name": "test", + "type": "long", + "logicalType" : "timestamp-micros" +} diff --git a/java/adapter/avro/target/test-classes/schema/logical/test_timestamp_millis.avsc b/java/adapter/avro/target/test-classes/schema/logical/test_timestamp_millis.avsc new file mode 100644 index 000000000000..822a2c360c5f --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/logical/test_timestamp_millis.avsc @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "name": "test", + "type": "long", + "logicalType" : "timestamp-millis" +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_array_before.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_array_before.avsc new file mode 100644 index 000000000000..e836aa768536 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_array_before.avsc @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "test", + "fields": [ + {"name": "f0", "type": "string"}, + {"name": "f1", "type": {"type" : "array", "items": "string"}}, + {"name": "f2", "type": "boolean"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_array_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_array_expected.avsc new file mode 100644 index 000000000000..36e7fdfb066f --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_array_expected.avsc @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "test", + "fields": [ + {"name": "f0", "type": "string"}, + {"name": "f2", "type": "boolean"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_base1.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_base1.avsc new file mode 100644 index 000000000000..5338253f45f7 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_base1.avsc @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f0", "type": {"type" : "fixed", "size":5, "name" : "fix"}}, + {"name": "f1", "type": {"type" : "enum", "name" : "enum", "symbols": ["TEST0", "TEST1"]}}, + {"name": "f2", "type": "string"}, + {"name": "f3", "type": "bytes"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_base2.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_base2.avsc new file mode 100644 index 000000000000..50655a70e129 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_base2.avsc @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f0", "type": "boolean"}, + {"name": "f1", "type": "int"}, + {"name": "f2", "type": "long"}, + {"name": "f3", "type": "float"}, + {"name": "f4", "type": "double"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_boolean_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_boolean_expected.avsc new file mode 100644 index 000000000000..9b62e3149ffc --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_boolean_expected.avsc @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f1", "type": "int"}, + {"name": "f2", "type": "long"}, + {"name": "f3", "type": "float"}, + {"name": "f4", "type": "double"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_bytes_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_bytes_expected.avsc new file mode 100644 index 000000000000..8a1903b34da8 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_bytes_expected.avsc @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f0", "type": {"type" : "fixed", "size":5, "name" : "fix"}}, + {"name": "f1", "type": {"type" : "enum", "name" : "enum", "symbols": ["TEST0", "TEST1"]}}, + {"name": "f2", "type": "string"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_double_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_double_expected.avsc new file mode 100644 index 000000000000..6021c445413c --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_double_expected.avsc @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f0", "type": "boolean"}, + {"name": "f1", "type": "int"}, + {"name": "f2", "type": "long"}, + {"name": "f3", "type": "float"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_enum_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_enum_expected.avsc new file mode 100644 index 000000000000..f5ed86a2892d --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_enum_expected.avsc @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f0", "type": {"type" : "fixed", "size":5, "name" : "fix"}}, + {"name": "f2", "type": "string"}, + {"name": "f3", "type": "bytes"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_fixed_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_fixed_expected.avsc new file mode 100644 index 000000000000..5423a7977c67 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_fixed_expected.avsc @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f1", "type": {"type" : "enum", "name" : "enum", "symbols": ["TEST0", "TEST1"]}}, + {"name": "f2", "type": "string"}, + {"name": "f3", "type": "bytes"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_float_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_float_expected.avsc new file mode 100644 index 000000000000..dea106331a9e --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_float_expected.avsc @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f0", "type": "boolean"}, + {"name": "f1", "type": "int"}, + {"name": "f2", "type": "long"}, + {"name": "f4", "type": "double"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_int_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_int_expected.avsc new file mode 100644 index 000000000000..53d4f1025b4a --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_int_expected.avsc @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f0", "type": "boolean"}, + {"name": "f2", "type": "long"}, + {"name": "f3", "type": "float"}, + {"name": "f4", "type": "double"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_long_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_long_expected.avsc new file mode 100644 index 000000000000..bf16601dd458 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_long_expected.avsc @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f0", "type": "boolean"}, + {"name": "f1", "type": "int"}, + {"name": "f3", "type": "float"}, + {"name": "f4", "type": "double"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_map_before.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_map_before.avsc new file mode 100644 index 000000000000..8cbb1a1d7206 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_map_before.avsc @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "test", + "fields": [ + {"name": "f0", "type": "string"}, + {"name": "f1", "type": {"type" : "map", "values": "string"}}, + {"name": "f2", "type": "boolean"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_map_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_map_expected.avsc new file mode 100644 index 000000000000..36e7fdfb066f --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_map_expected.avsc @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "test", + "fields": [ + {"name": "f0", "type": "string"}, + {"name": "f2", "type": "boolean"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_multi_fields_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_multi_fields_expected.avsc new file mode 100644 index 000000000000..b5d637b1daae --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_multi_fields_expected.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testSkip", + "fields": [ + {"name": "f0", "type": "string"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_record_before.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_record_before.avsc new file mode 100644 index 000000000000..7aee92b924e3 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_record_before.avsc @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "test", + "fields": [ + { + "name" : "f0", + "type" : { + "type" : "record", + "name" : "nestedInRecord", + "fields": [ + {"name": "f00", "type": "string"}, + {"name": "f01", "type": "int"} + ] + } + }, + { + "name" : "f1", "type" : "int" + } + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_record_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_record_expected.avsc new file mode 100644 index 000000000000..3e2495203517 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_record_expected.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "test", + "fields": [ + { "name" : "f1", "type" : "int"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_second_level_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_second_level_expected.avsc new file mode 100644 index 000000000000..f3b7f8c09740 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_second_level_expected.avsc @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testSkipNested", + "fields": [ + { + "name" : "nested", + "type" : { + "type" : "record", + "name" : "nestedInRecord", + "fields": [ + {"name": "f1", "type": "int"} + ] + } + } + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_single_field_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_single_field_expected.avsc new file mode 100644 index 000000000000..553525847d0a --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_single_field_expected.avsc @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testSkip", + "fields": [ + {"name": "f0", "type": "string"}, + {"name": "f2", "type": "boolean"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_string_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_string_expected.avsc new file mode 100644 index 000000000000..2d2c0817434c --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_string_expected.avsc @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f0", "type": {"type" : "fixed", "size":5, "name" : "fix"}}, + {"name": "f1", "type": {"type" : "enum", "name" : "enum", "symbols": ["TEST0", "TEST1"]}}, + {"name": "f3", "type": "bytes"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_third_level_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_third_level_expected.avsc new file mode 100644 index 000000000000..6f42da893daa --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_third_level_expected.avsc @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "firstLevel", + "fields": [ + { + "name" : "f0", + "type" : { + "type" : "record", + "name" : "secondLevel", + "fields": [ + { + "name" : "f0", + "type" : { + "type" : "record", + "name" : "thirdLevel", + "fields" : [ + {"name": "f1", "type": "int"}, + {"name": "f0", "type": "string"}, + {"name": "f2", "type": "boolean"} + ] + } + } + ] + } + } + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_before.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_before.avsc new file mode 100644 index 000000000000..fc1105911dd8 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_before.avsc @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "test", + "fields": [ + {"name": "f0", "type": ["string"]}, + {"name": "f1", "type": ["string", "null"]}, + {"name": "f2", "type": ["string", "int"]}, + {"name": "f3", "type": "int"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_multi_fields_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_multi_fields_expected.avsc new file mode 100644 index 000000000000..308e027a26e0 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_multi_fields_expected.avsc @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "test", + "fields": [ + {"name": "f0", "type": ["string"]}, + {"name": "f1", "type": ["string", "null"]}, + {"name": "f3", "type": "int"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_nullable_field_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_nullable_field_expected.avsc new file mode 100644 index 000000000000..cbc83e5666e1 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_nullable_field_expected.avsc @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "test", + "fields": [ + {"name": "f0", "type": ["string"]}, + {"name": "f2", "type": ["string", "int"]}, + {"name": "f3", "type": "int"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_one_field_expected.avsc b/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_one_field_expected.avsc new file mode 100644 index 000000000000..0f72fb432fbc --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/skip/test_skip_union_one_field_expected.avsc @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "test", + "fields": [ + {"name": "f1", "type": ["string", "null"]}, + {"name": "f2", "type": ["string", "int"]}, + {"name": "f3", "type": ["string", "int"]} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test.avsc b/java/adapter/avro/target/test-classes/schema/test.avsc new file mode 100644 index 000000000000..92c0873de1da --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test.avsc @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "User", + "fields": [ + {"name": "name", "type": "string"}, + {"name": "favorite_number", "type": ["int", "null"]}, + {"name": "favorite_color", "type": ["string", "null"]} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_array.avsc b/java/adapter/avro/target/test-classes/schema/test_array.avsc new file mode 100644 index 000000000000..5b75a4031d89 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_array.avsc @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "array", + "items": "string", + "name": "testArray" +} diff --git a/java/adapter/avro/target/test-classes/schema/test_fixed.avsc b/java/adapter/avro/target/test-classes/schema/test_fixed.avsc new file mode 100644 index 000000000000..a4d96e9ab550 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_fixed.avsc @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "fixed", + "size": 6, + "name": "testFixed" +} diff --git a/java/adapter/avro/target/test-classes/schema/test_large_data.avsc b/java/adapter/avro/target/test-classes/schema/test_large_data.avsc new file mode 100644 index 000000000000..f784ae62337a --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_large_data.avsc @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testLargeData", + "fields": [ + { + "name": "f0", + "type": { + "name" : "f0", + "type" : "enum", + "symbols" : ["value1", "value2", "value3", "value4", "value5"] + } + }, + { + "name" : "f1", + "type" : { + "type" : "record", + "name" : "nestedRecord", + "fields": [ + {"name": "f1_0", "type": "string"}, + {"name": "f1_1", "type": "int"} + ] + } + }, + + {"name": "f2", "type": "string"}, + {"name": "f3", "type": "int"}, + {"name": "f4", "type": "boolean"}, + {"name": "f5", "type": "float"}, + {"name": "f6", "type": "double"}, + {"name": "f7", "type": "bytes"}, + {"name": "f8", "type": ["string", "int"]}, + { + "name": "f9", + "type": { + "name" : "f9", + "type" : "array", + "items" : "string" + } + }, + { + "name": "f10", + "type": { + "name" : "f10", + "type" : "map", + "values" : "string" + } + }, + { + "name": "f11", + "type": { + "type" : "fixed", + "name" : "f11", + "size" : 5 + } + } + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_map.avsc b/java/adapter/avro/target/test-classes/schema/test_map.avsc new file mode 100644 index 000000000000..0dfa3a595bb2 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_map.avsc @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "map", + "values": "string", + "name": "testMap" +} diff --git a/java/adapter/avro/target/test-classes/schema/test_nested_record.avsc b/java/adapter/avro/target/test-classes/schema/test_nested_record.avsc new file mode 100644 index 000000000000..29dddfd1adc6 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_nested_record.avsc @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testNestedRecord", + "fields": [ + { + "name" : "f0", + "type" : { + "type" : "record", + "name" : "nestedInRecord", + "fields": [ + {"name": "f0", "type": "string"}, + {"name": "f1", "type": "int"} + ] + } + } + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_nullable_boolean.avsc b/java/adapter/avro/target/test-classes/schema/test_nullable_boolean.avsc new file mode 100644 index 000000000000..62af1a85d820 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_nullable_boolean.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "nullableBoolean", + "fields": [ + {"name": "f0", "type": ["null", "boolean"]} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_nullable_bytes.avsc b/java/adapter/avro/target/test-classes/schema/test_nullable_bytes.avsc new file mode 100644 index 000000000000..002bc7ce2c36 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_nullable_bytes.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "nullableBytes", + "fields": [ + {"name": "f0", "type": ["null", "bytes"]} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_nullable_double.avsc b/java/adapter/avro/target/test-classes/schema/test_nullable_double.avsc new file mode 100644 index 000000000000..642b7aa1622f --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_nullable_double.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "nullableDouble", + "fields": [ + {"name": "f0", "type": ["null", "double"]} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_nullable_float.avsc b/java/adapter/avro/target/test-classes/schema/test_nullable_float.avsc new file mode 100644 index 000000000000..dff285909b13 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_nullable_float.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "nullableFloat", + "fields": [ + {"name": "f0", "type": ["null", "float"]} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_nullable_int.avsc b/java/adapter/avro/target/test-classes/schema/test_nullable_int.avsc new file mode 100644 index 000000000000..abb2fc48a684 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_nullable_int.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "nullableInt", + "fields": [ + {"name": "f0", "type": ["null", "int"]} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_nullable_long.avsc b/java/adapter/avro/target/test-classes/schema/test_nullable_long.avsc new file mode 100644 index 000000000000..0624d2737114 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_nullable_long.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "nullableLong", + "fields": [ + {"name": "f0", "type": ["null", "long"]} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_nullable_string.avsc b/java/adapter/avro/target/test-classes/schema/test_nullable_string.avsc new file mode 100644 index 000000000000..347808ce6da1 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_nullable_string.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "nullableString", + "fields": [ + {"name": "f0", "type": ["null", "string"]} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_nullable_union.avsc b/java/adapter/avro/target/test-classes/schema/test_nullable_union.avsc new file mode 100644 index 000000000000..af94812d7637 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_nullable_union.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testNullableUnions", + "fields": [ + {"name": "f0", "type": ["string", "int", "null"]} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_primitive_boolean.avsc b/java/adapter/avro/target/test-classes/schema/test_primitive_boolean.avsc new file mode 100644 index 000000000000..7652ce72385d --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_primitive_boolean.avsc @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "boolean", + "name": "TestBoolean" +} diff --git a/java/adapter/avro/target/test-classes/schema/test_primitive_bytes.avsc b/java/adapter/avro/target/test-classes/schema/test_primitive_bytes.avsc new file mode 100644 index 000000000000..5102430b65aa --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_primitive_bytes.avsc @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "bytes", + "name": "TestBytes" +} diff --git a/java/adapter/avro/target/test-classes/schema/test_primitive_double.avsc b/java/adapter/avro/target/test-classes/schema/test_primitive_double.avsc new file mode 100644 index 000000000000..d1ae0b605a93 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_primitive_double.avsc @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "double", + "name": "TestDouble" +} diff --git a/java/adapter/avro/target/test-classes/schema/test_primitive_enum.avsc b/java/adapter/avro/target/test-classes/schema/test_primitive_enum.avsc new file mode 100644 index 000000000000..bd8df61020eb --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_primitive_enum.avsc @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "enum", + "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"], + "name": "testEnum" +} diff --git a/java/adapter/avro/target/test-classes/schema/test_primitive_float.avsc b/java/adapter/avro/target/test-classes/schema/test_primitive_float.avsc new file mode 100644 index 000000000000..675d1090d869 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_primitive_float.avsc @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "float", + "name": "TestFloat" +} diff --git a/java/adapter/avro/target/test-classes/schema/test_primitive_int.avsc b/java/adapter/avro/target/test-classes/schema/test_primitive_int.avsc new file mode 100644 index 000000000000..8fc8488281ad --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_primitive_int.avsc @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "int", + "name": "TestInt" +} diff --git a/java/adapter/avro/target/test-classes/schema/test_primitive_long.avsc b/java/adapter/avro/target/test-classes/schema/test_primitive_long.avsc new file mode 100644 index 000000000000..b9706107c09a --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_primitive_long.avsc @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "long", + "name": "TestLong" +} diff --git a/java/adapter/avro/target/test-classes/schema/test_primitive_string.avsc b/java/adapter/avro/target/test-classes/schema/test_primitive_string.avsc new file mode 100644 index 000000000000..b4a89a7f62cf --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_primitive_string.avsc @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "string", + "name": "TestString" +} diff --git a/java/adapter/avro/target/test-classes/schema/test_record.avsc b/java/adapter/avro/target/test-classes/schema/test_record.avsc new file mode 100644 index 000000000000..e83cf1180d20 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_record.avsc @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testRecord", + "fields": [ + {"name": "f0", "type": "string"}, + {"name": "f1", "type": "int"}, + {"name": "f2", "type": "boolean"} + ] +} diff --git a/java/adapter/avro/target/test-classes/schema/test_union.avsc b/java/adapter/avro/target/test-classes/schema/test_union.avsc new file mode 100644 index 000000000000..f181e36e3c12 --- /dev/null +++ b/java/adapter/avro/target/test-classes/schema/test_union.avsc @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.avro", + "type": "record", + "name": "testUnions", + "fields": [ + {"name": "f0", "type": ["string", "int"]} + ] +} diff --git a/java/adapter/jdbc/target/classes/arrow-git.properties b/java/adapter/jdbc/target/classes/arrow-git.properties new file mode 100644 index 000000000000..fef7f2c20621 --- /dev/null +++ b/java/adapter/jdbc/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:15 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false 
+git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/adapter/jdbc/target/test-classes/h2/comment.sql b/java/adapter/jdbc/target/test-classes/h2/comment.sql new file mode 100644 index 000000000000..db8964fe1d4a --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/comment.sql @@ -0,0 +1,21 @@ +--Licensed to the Apache Software Foundation (ASF) under one or more contributor +--license agreements. See the NOTICE file distributed with this work for additional +--information regarding copyright ownership. The ASF licenses this file to +--You under the Apache License, Version 2.0 (the "License"); you may not use +--this file except in compliance with the License. You may obtain a copy of +--the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +--by applicable law or agreed to in writing, software distributed under the +--License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +--OF ANY KIND, either express or implied. See the License for the specific +--language governing permissions and limitations under the License. 
+create table table1( + id bigint primary key, + name varchar(255), + column1 boolean, + columnN int + ); + +COMMENT ON TABLE table1 IS 'This is super special table with valuable data'; +COMMENT ON COLUMN table1.id IS 'Record identifier'; +COMMENT ON COLUMN table1.name IS 'Name of record'; +COMMENT ON COLUMN table1.columnN IS 'Informative description of columnN'; \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_all_datatypes_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_all_datatypes_h2.yml new file mode 100644 index 000000000000..c4f0017095df --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_all_datatypes_h2.yml @@ -0,0 +1,123 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_all_datatypes_h2' + +create: 'CREATE TABLE table1 (int_field1 INT, bool_field2 BOOLEAN, tinyint_field3 TINYINT, smallint_field4 SMALLINT, bigint_field5 BIGINT, + decimal_field6 DECIMAL(20,2), double_field7 DOUBLE, real_field8 REAL, time_field9 TIME, date_field10 DATE, timestamp_field11 TIMESTAMP, + binary_field12 VARBINARY(100), varchar_field13 VARCHAR(256), blob_field14 BLOB, clob_field15 CLOB, char_field16 CHAR(14), bit_field17 BIT, + null_field18 NULL, list_field19 INT ARRAY, map_field20 VARCHAR(256));' + +data: + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1, null, ARRAY[1, 2, 3], ''{"a":"b","key":"12345"}'');' + + - 'INSERT INTO table1 VALUES (102, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1, null, ARRAY[1, 2],''{"c":"d"}'');' + + - 'INSERT INTO table1 VALUES (103, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', 
''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1, null, ARRAY[1],''{"e":"f"}'');' + + - 'INSERT INTO table1 VALUES (104, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1, null, ARRAY[2, 3, 4],''{"g":"h"}'');' + + - 'INSERT INTO table1 VALUES (null, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1, null, ARRAY[2, 3],''{"i":"j"}'');' + + - 'INSERT INTO table1 VALUES (null, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 
GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1, null, ARRAY[2],''{"k":"l"}'');' + + - 'INSERT INTO table1 VALUES (107, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1, null, ARRAY[3, 4, 5],''{"m":"n"}'');' + + - 'INSERT INTO table1 VALUES (108, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1, null, ARRAY[3, 4],''{"o":"p"}'');' + + - 'INSERT INTO table1 VALUES (109, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 
12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1, null, ARRAY[3],''{"q":"r"}'');' + + - 'INSERT INTO table1 VALUES (110, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', 1, null, ARRAY[],''{"s":"t"}'');' + +query: 'select int_field1, bool_field2, tinyint_field3, smallint_field4, bigint_field5, decimal_field6, double_field7, real_field8, + time_field9, date_field10, timestamp_field11, binary_field12, varchar_field13, blob_field14, clob_field15, char_field16, bit_field17, null_field18, list_field19, map_field20 from table1' + +drop: 'DROP table table1;' + +rowCount: '10' + +values: + - 'INT_FIELD1=101,102,103,104,null,null,107,108,109,110' + - 'BOOL_FIELD2=1,1,1,1,1,1,1,1,1,1' + - 'BIT_FIELD17=1,1,1,1,1,1,1,1,1,1' + - 'TINYINT_FIELD3=45,45,45,45,45,45,45,45,45,45' + - 'SMALLINT_FIELD4=12000,12000,12000,12000,12000,12000,12000,12000,12000,12000' + - 'BIGINT_FIELD5=92233720,92233720,92233720,92233720,92233720,92233720,92233720,92233720,92233720,92233720' + - 'REAL_FIELD8=56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f,56478356785.345f' + - 
'DECIMAL_FIELD6=17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23,17345667789.23' + - 'DOUBLE_FIELD7=56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345,56478356785.345' + - 'TIME_FIELD9=45935000,45935000,45935000,45935000,45935000,45935000,45935000,45935000,45935000,45935000' + - 'DATE_FIELD10=17574,17574,17574,17574,17574,17574,17574,17574,17574,17574' + - 'TIMESTAMP_FIELD11=1518439535000,1518439535000,1518439535000,1518439535000,1518439535000,1518439535000,1518439535000,1518439535000,1518439535000,1518439535000' + - 'CHAR_FIELD16=some char text,some char text,some char text,some char text,some char text, + some char text,some char text,some char text,some char text,some char text' + - 'VARCHAR_FIELD13=some text that needs to be converted to varchar,some text that needs to be converted to varchar, + some text that needs to be converted to varchar,some text that needs to be converted to varchar, + some text that needs to be converted to varchar,some text that needs to be converted to varchar, + some text that needs to be converted to varchar,some text that needs to be converted to varchar, + some text that needs to be converted to varchar,some text that needs to be converted to varchar' + - 'BINARY_FIELD12=736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 
736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - 'BLOB_FIELD14=736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + 736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - 'CLOB_FIELD15=some text that needs to be converted to clob,some text that needs to be converted to clob, + some text that needs to be converted to clob,some text that needs to be converted to clob, + some text that needs to be converted to clob,some text that needs to be converted to clob, + some text that needs to be converted to clob,some text that needs to be converted to clob, + some text that needs to be converted to clob,some text that needs to be converted to clob' + - 'LIST_FIELD19=(1;2;3),(1;2),(1),(2;3;4),(2;3),(2),(3;4;5),(3;4),(3),()' + - 
'MAP_FIELD20={"a":"b"|"key":"12345"},{"c":"d"},{"e":"f"},{"g":"h"},{"i":"j"},{"k":"l"},{"m":"n"},{"o":"p"},{"q":"r"},{"s":"t"}' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_all_datatypes_null_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_all_datatypes_null_h2.yml new file mode 100644 index 000000000000..9be76229dab8 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_all_datatypes_null_h2.yml @@ -0,0 +1,55 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_all_datatypes_null_h2' + +type: 'null' + +vectors: + - 'INT_FIELD1' + - 'BOOL_FIELD2' + - 'TINYINT_FIELD3' + - 'SMALLINT_FIELD4' + - 'BIGINT_FIELD5' + - 'DECIMAL_FIELD6' + - 'DOUBLE_FIELD7' + - 'REAL_FIELD8' + - 'TIME_FIELD9' + - 'DATE_FIELD10' + - 'TIMESTAMP_FIELD11' + - 'BINARY_FIELD12' + - 'VARCHAR_FIELD13' + - 'BLOB_FIELD14' + - 'CLOB_FIELD15' + - 'CHAR_FIELD16' + - 'BIT_FIELD17' + - 'LIST_FIELD19' + - 'MAP_FIELD20' + +rowCount: '5' + +create: 'CREATE TABLE table1 (int_field1 INT, bool_field2 BOOLEAN, tinyint_field3 TINYINT, smallint_field4 SMALLINT, bigint_field5 BIGINT, + decimal_field6 DECIMAL(20,2), double_field7 DOUBLE, real_field8 REAL, time_field9 TIME, date_field10 DATE, timestamp_field11 TIMESTAMP, + binary_field12 VARBINARY(100), varchar_field13 VARCHAR(256), blob_field14 BLOB, clob_field15 CLOB, char_field16 CHAR(14), bit_field17 BIT, + list_field19 INT ARRAY, map_field20 VARCHAR(256));' + +data: + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + +query: 'select int_field1, bool_field2, tinyint_field3, smallint_field4, bigint_field5, decimal_field6, double_field7, real_field8, + time_field9, date_field10, timestamp_field11, binary_field12, varchar_field13, blob_field14, clob_field15, char_field16, bit_field17, + list_field19 from table1' + +drop: 'DROP 
table table1;' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_all_datatypes_selected_null_rows_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_all_datatypes_selected_null_rows_h2.yml new file mode 100644 index 000000000000..fda31da15077 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_all_datatypes_selected_null_rows_h2.yml @@ -0,0 +1,91 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_all_datatypes_selected_null_rows_h2' + +type: 'selected_null_row' + +vectors: + - 'INT_FIELD1' + - 'BOOL_FIELD2' + - 'TINYINT_FIELD3' + - 'SMALLINT_FIELD4' + - 'BIGINT_FIELD5' + - 'DECIMAL_FIELD6' + - 'DOUBLE_FIELD7' + - 'REAL_FIELD8' + - 'TIME_FIELD9' + - 'DATE_FIELD10' + - 'TIMESTAMP_FIELD11' + - 'BINARY_FIELD12' + - 'VARCHAR_FIELD13' + - 'BLOB_FIELD14' + - 'CLOB_FIELD15' + - 'CHAR_FIELD16' + - 'BIT_FIELD17' + - 'LIST_FIELD19' + - 'MAP_FIELD20' + +create: 'CREATE TABLE table1 (int_field1 INT, bool_field2 BOOLEAN, tinyint_field3 TINYINT, smallint_field4 SMALLINT, bigint_field5 BIGINT, + decimal_field6 DECIMAL(20,2), double_field7 DOUBLE, real_field8 REAL, time_field9 TIME, date_field10 DATE, timestamp_field11 TIMESTAMP, + binary_field12 VARBINARY(100), varchar_field13 VARCHAR(256), blob_field14 BLOB, clob_field15 CLOB, char_field16 CHAR(14), bit_field17 BIT, + list_field19 INT ARRAY, map_field20 VARCHAR(256));' + +data: + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', + 1, ARRAY[1, 2, 3],''{"a":"b"}'');' + + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + + - 'INSERT INTO table1 VALUES (101, 1, 45, 12000, 92233720, 17345667789.23, 56478356785.345, 
56478356785.345, PARSEDATETIME(''12:45:35 GMT'', ''HH:mm:ss z''), + PARSEDATETIME(''2018-02-12 GMT'', ''yyyy-MM-dd z''), PARSEDATETIME(''2018-02-12 12:45:35 GMT'', ''yyyy-MM-dd HH:mm:ss z''), + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to varchar'', + ''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'', ''some text that needs to be converted to clob'', ''some char text'', + 1, ARRAY[1, 2, 3],''{"c":"d"}'');' + + - 'INSERT INTO table1 VALUES (null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null);' + +query: 'select int_field1, bool_field2, tinyint_field3, smallint_field4, bigint_field5, decimal_field6, double_field7, real_field8, + time_field9, date_field10, timestamp_field11, binary_field12, varchar_field13, blob_field14, clob_field15, char_field16, bit_field17, + list_field19, map_field20 from table1' + +drop: 'DROP table table1;' + +rowCount: '5' + +values: + - 'INT_FIELD1=null,101,null,101,null' + - 'BOOL_FIELD2=null,1,null,1,null' + - 'BIT_FIELD17=null,1,null,1,null' + - 'TINYINT_FIELD3=null,45,null,45,null' + - 'SMALLINT_FIELD4=null,12000,null,12000,null' + - 'BIGINT_FIELD5=null,92233720,null,92233720,null' + - 'REAL_FIELD8=null,56478356785.345f,null,56478356785.345f,null' + - 'DECIMAL_FIELD6=null,17345667789.23,null,17345667789.23,null' + - 'DOUBLE_FIELD7=null,56478356785.345,null,56478356785.345,null' + - 'TIME_FIELD9=null,45935000,null,45935000,null' + - 'DATE_FIELD10=null,17574,null,17574,null' + - 'TIMESTAMP_FIELD11=null,1518439535000,null,1518439535000,null' + - 'CHAR_FIELD16=null,some char text,null,some char text,null' + - 'VARCHAR_FIELD13=null,some text that needs to be converted to varchar,null, + some text that needs to be converted to varchar,null' + - 
'BINARY_FIELD12=null,736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + null,736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279,null' + - 'BLOB_FIELD14=null,736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279, + null,736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279,null' + - 'CLOB_FIELD15=null,some text that needs to be converted to clob,null,some text that needs to be converted to clob,null' + - 'LIST_FIELD19=null,(1;2;3),null,(1;2;3),null' + - 'MAP_FIELD20=null,{"a":"b"},null,{"c":"d"},null' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_bigint_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_bigint_h2.yml new file mode 100644 index 000000000000..d9b39dfa204a --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_bigint_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_bigint_h2' + +type: 'big_int' + +vector: 'BIGINT_FIELD5' + +create: 'CREATE TABLE table1 (bigint_field5 BIGINT);' + +data: + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + - 'INSERT INTO table1 VALUES (92233720);' + +query: 'select bigint_field5 from table1;' + +drop: 'DROP table table1;' + +values: + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' + - '92233720' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_binary_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_binary_h2.yml new file mode 100644 index 000000000000..3d7b1ec658ef --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_binary_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_binary_h2' + +type: 'binary' + +vector: 'BINARY_FIELD12' + +create: 'CREATE TABLE table1 (binary_field12 VARBINARY(100));' + +data: + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + +query: 'select binary_field12 from table1;' + +drop: 'DROP table table1;' + +values: + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - 
'736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_bit_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_bit_h2.yml new file mode 100644 index 000000000000..7e6e07cab3d9 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_bit_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_bit_h2' + +type: 'bit' + +vector: 'BIT_FIELD17' + +create: 'CREATE TABLE table1 (bit_field17 BIT);' + +data: + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + +query: 'select bit_field17 from table1;' + +drop: 'DROP table table1;' + +values: + - '1' + - '1' + - '1' + - '1' + - '1' + - '1' + - '1' + - '1' + - '1' + - '1' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_blob_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_blob_h2.yml new file mode 100644 index 000000000000..df445120f675 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_blob_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_blob_h2' + +type: 'blob' + +vector: 'BLOB_FIELD14' + +create: 'CREATE TABLE table1 (blob_field14 BLOB);' + +data: + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + - 'INSERT INTO table1 VALUES (''736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279'');' + +query: 'select blob_field14 from table1;' + +drop: 'DROP table table1;' + +values: + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - 
'736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' + - '736f6d6520746578742074686174206e6565647320746f20626520636f6e76657274656420746f2062696e617279' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_bool_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_bool_h2.yml new file mode 100644 index 000000000000..2284b9fae4a4 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_bool_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_bool_h2' + +type: 'bool' + +vector: 'BOOL_FIELD2' + +create: 'CREATE TABLE table1 (bool_field2 BOOLEAN);' + +data: + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + - 'INSERT INTO table1 VALUES (1);' + +query: 'select bool_field2 from table1;' + +drop: 'DROP table table1;' + +values: + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' + - 'true' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_char_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_char_h2.yml new file mode 100644 index 000000000000..588df7bff4df --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_char_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_char_h2' + +type: 'char' + +vector: 'CHAR_FIELD16' + +create: 'CREATE TABLE table1 (char_field16 CHAR(14));' + +data: + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + - 'INSERT INTO table1 VALUES (''some char text'');' + +query: 'select char_field16 from table1;' + +drop: 'DROP table table1;' + +values: + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' + - 'some char text' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_charset_ch_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_charset_ch_h2.yml new file mode 100644 index 000000000000..2e60a4af5a97 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_charset_ch_h2.yml @@ -0,0 +1,43 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. 
See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_charset_ch_h2' + +type: 'charset' + +vectors: + - 'VARCHAR_FIELD13' + - 'CHAR_FIELD16' + - 'CLOB_FIELD15' + +rowCount: '5' + +charSet: 'GBK' + +create: 'CREATE TABLE table1 (int_field1 INT, varchar_field13 VARCHAR(256), clob_field15 CLOB, char_field16 CHAR(13));' + +data: + - 'INSERT INTO table1 VALUES (101,''一些帶有char編碼的文本需è¦è½‰æ›ç‚ºvarchar'', ''一些带有charç¼–ç çš„æ–‡æœ¬éœ€è¦è½¬æ¢ä¸ºclob'', ''一些charç¼–ç çš„字符文本'');' + - 'INSERT INTO table1 VALUES (101,''一些帶有char編碼的文本需è¦è½‰æ›ç‚ºvarchar'', ''一些带有charç¼–ç çš„æ–‡æœ¬éœ€è¦è½¬æ¢ä¸ºclob'', ''一些charç¼–ç çš„字符文本'');' + - 'INSERT INTO table1 VALUES (101,''一些帶有char編碼的文本需è¦è½‰æ›ç‚ºvarchar'', ''一些带有charç¼–ç çš„æ–‡æœ¬éœ€è¦è½¬æ¢ä¸ºclob'', ''一些charç¼–ç çš„字符文本'');' + - 'INSERT INTO table1 VALUES (101,''一些帶有char編碼的文本需è¦è½‰æ›ç‚ºvarchar'', ''一些带有charç¼–ç çš„æ–‡æœ¬éœ€è¦è½¬æ¢ä¸ºclob'', ''一些charç¼–ç çš„字符文本'');' + - 'INSERT INTO table1 VALUES (101,''一些帶有char編碼的文本需è¦è½‰æ›ç‚ºvarchar'', ''一些带有charç¼–ç çš„æ–‡æœ¬éœ€è¦è½¬æ¢ä¸ºclob'', ''一些charç¼–ç çš„字符文本'');' + +query: 'select varchar_field13, clob_field15, char_field16 from table1' + +drop: 'DROP table table1;' + +values: + - 'VARCHAR_FIELD13=一些帶有char編碼的文本需è¦è½‰æ›ç‚ºvarchar,一些帶有char編碼的文本需è¦è½‰æ›ç‚ºvarchar,一些帶有char編碼的文本需è¦è½‰æ›ç‚ºvarchar, + 一些帶有char編碼的文本需è¦è½‰æ›ç‚ºvarchar,一些帶有char編碼的文本需è¦è½‰æ›ç‚ºvarchar' + - 'CLOB_FIELD15=一些带有charç¼–ç çš„æ–‡æœ¬éœ€è¦è½¬æ¢ä¸ºclob,一些带有charç¼–ç çš„æ–‡æœ¬éœ€è¦è½¬æ¢ä¸ºclob,一些带有charç¼–ç çš„æ–‡æœ¬éœ€è¦è½¬æ¢ä¸ºclob, + 一些带有charç¼–ç çš„æ–‡æœ¬éœ€è¦è½¬æ¢ä¸ºclob,一些带有charç¼–ç çš„æ–‡æœ¬éœ€è¦è½¬æ¢ä¸ºclob' + - 'CHAR_FIELD16=一些charç¼–ç çš„字符文本,一些charç¼–ç çš„字符文本,一些charç¼–ç çš„字符文本,一些charç¼–ç çš„字符文本,一些charç¼–ç çš„字符文本' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_charset_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_charset_h2.yml new file mode 100644 index 000000000000..383681e5b3b4 --- /dev/null +++ 
b/java/adapter/jdbc/target/test-classes/h2/test1_charset_h2.yml @@ -0,0 +1,53 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_charset_h2' + +type: 'charset' + +vectors: + - 'VARCHAR_FIELD13' + - 'CHAR_FIELD16' + - 'CLOB_FIELD15' + +rowCount: '10' + +create: 'CREATE TABLE table1 (int_field1 INT, varchar_field13 VARCHAR(256), clob_field15 CLOB, char_field16 CHAR(33));' + +data: + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 
'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + - 'INSERT INTO table1 VALUES (101,''some text with char encoding that needs to be converted to varchar'', ''some text with char encoding that needs to be converted to clob'', ''some char text with char encoding'');' + +query: 'select varchar_field13, clob_field15, char_field16 from table1' + +drop: 'DROP table table1;' + +values: + - 'VARCHAR_FIELD13=some text with char encoding that needs to be converted to varchar,some text with char encoding that needs to be converted to varchar, + some text with char encoding that needs to be converted to varchar,some text with char encoding that needs to be converted to varchar, + some text with char encoding that needs to be converted to varchar,some text with char encoding that needs to be converted to varchar, + some text with char encoding that needs to be converted to varchar,some text with char encoding that needs to be converted to varchar, + some text 
with char encoding that needs to be converted to varchar,some text with char encoding that needs to be converted to varchar' + - 'CLOB_FIELD15=some text with char encoding that needs to be converted to clob,some text with char encoding that needs to be converted to clob, + some text with char encoding that needs to be converted to clob,some text with char encoding that needs to be converted to clob, + some text with char encoding that needs to be converted to clob,some text with char encoding that needs to be converted to clob, + some text with char encoding that needs to be converted to clob,some text with char encoding that needs to be converted to clob, + some text with char encoding that needs to be converted to clob,some text with char encoding that needs to be converted to clob' + - 'CHAR_FIELD16=some char text with char encoding,some char text with char encoding,some char text with char encoding,some char text with char encoding,some char text with char encoding, + some char text with char encoding,some char text with char encoding,some char text with char encoding,some char text with char encoding,some char text with char encoding' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_charset_jp_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_charset_jp_h2.yml new file mode 100644 index 000000000000..9b3cf9a18fe0 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_charset_jp_h2.yml @@ -0,0 +1,43 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. 
You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_charset_jp_h2' + +type: 'charset' + +vectors: + - 'VARCHAR_FIELD13' + - 'CHAR_FIELD16' + - 'CLOB_FIELD15' + +rowCount: '5' + +charSet: 'SJIS' + +create: 'CREATE TABLE table1 (int_field1 INT, varchar_field13 VARCHAR(256), clob_field15 CLOB, char_field16 CHAR(23));' + +data: + - 'INSERT INTO table1 VALUES (101,''varcharã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹charエンコーディングã®ãƒ†ã‚­ã‚¹ãƒˆ'', ''charエンコーディングã®ã‚るテキストをclobã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹'', ''charエンコーディングã®ã‚ã‚‹charテキスト'');' + - 'INSERT INTO table1 VALUES (101,''varcharã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹charエンコーディングã®ãƒ†ã‚­ã‚¹ãƒˆ'', ''charエンコーディングã®ã‚るテキストをclobã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹'', ''charエンコーディングã®ã‚ã‚‹charテキスト'');' + - 'INSERT INTO table1 VALUES (101,''varcharã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹charエンコーディングã®ãƒ†ã‚­ã‚¹ãƒˆ'', ''charエンコーディングã®ã‚るテキストをclobã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹'', ''charエンコーディングã®ã‚ã‚‹charテキスト'');' + - 'INSERT INTO table1 VALUES (101,''varcharã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹charエンコーディングã®ãƒ†ã‚­ã‚¹ãƒˆ'', ''charエンコーディングã®ã‚るテキストをclobã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹'', ''charエンコーディングã®ã‚ã‚‹charテキスト'');' + - 'INSERT INTO table1 VALUES (101,''varcharã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹charエンコーディングã®ãƒ†ã‚­ã‚¹ãƒˆ'', ''charエンコーディングã®ã‚るテキストをclobã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹'', ''charエンコーディングã®ã‚ã‚‹charテキスト'');' + +query: 'select varchar_field13, clob_field15, char_field16 from table1' + +drop: 'DROP table table1;' + +values: + - 'VARCHAR_FIELD13=varcharã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹charエンコーディングã®ãƒ†ã‚­ã‚¹ãƒˆ,varcharã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹charエンコーディングã®ãƒ†ã‚­ã‚¹ãƒˆ,varcharã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹charエンコーディングã®ãƒ†ã‚­ã‚¹ãƒˆ, + 
varcharã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹charエンコーディングã®ãƒ†ã‚­ã‚¹ãƒˆ,varcharã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹charエンコーディングã®ãƒ†ã‚­ã‚¹ãƒˆ' + - 'CLOB_FIELD15=charエンコーディングã®ã‚るテキストをclobã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹,charエンコーディングã®ã‚るテキストをclobã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹,charエンコーディングã®ã‚るテキストをclobã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹, + charエンコーディングã®ã‚るテキストをclobã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹,charエンコーディングã®ã‚るテキストをclobã«å¤‰æ›ã™ã‚‹å¿…è¦ãŒã‚ã‚‹' + - 'CHAR_FIELD16=charエンコーディングã®ã‚ã‚‹charテキスト,charエンコーディングã®ã‚ã‚‹charテキスト,charエンコーディングã®ã‚ã‚‹charテキスト,charエンコーディングã®ã‚ã‚‹charテキスト,charエンコーディングã®ã‚ã‚‹charテキスト' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_charset_kr_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_charset_kr_h2.yml new file mode 100644 index 000000000000..d6e051c094fb --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_charset_kr_h2.yml @@ -0,0 +1,43 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_charset_kr_h2' + +type: 'charset' + +vectors: + - 'VARCHAR_FIELD13' + - 'CHAR_FIELD16' + - 'CLOB_FIELD15' + +rowCount: '5' + +charSet: 'EUC-KR' + +create: 'CREATE TABLE table1 (int_field1 INT, varchar_field13 VARCHAR(256), clob_field15 CLOB, char_field16 CHAR(22));' + +data: + - 'INSERT INTO table1 VALUES (101,''char ì¸ì½”ë”©ì„ ì‚¬ìš©í•˜ëŠ” ì¼ë¶€ í…스트를 varchar로 변환해야합니다.'', ''clob로 변환해야하는 char ì¸ì½”ë”©ì„ ê°€ì§„ í…스트'', ''char ì¸ì½”ë”©ì„ ì‚¬ìš©í•œ char í…스트'');' + - 'INSERT INTO table1 VALUES (101,''char ì¸ì½”ë”©ì„ ì‚¬ìš©í•˜ëŠ” ì¼ë¶€ í…스트를 varchar로 변환해야합니다.'', ''clob로 변환해야하는 char ì¸ì½”ë”©ì„ ê°€ì§„ í…스트'', ''char ì¸ì½”ë”©ì„ ì‚¬ìš©í•œ char í…스트'');' + - 'INSERT INTO table1 VALUES (101,''char ì¸ì½”ë”©ì„ ì‚¬ìš©í•˜ëŠ” ì¼ë¶€ í…스트를 varchar로 변환해야합니다.'', ''clob로 변환해야하는 char ì¸ì½”ë”©ì„ ê°€ì§„ í…스트'', ''char ì¸ì½”ë”©ì„ ì‚¬ìš©í•œ char í…스트'');' + - 'INSERT INTO table1 VALUES (101,''char ì¸ì½”ë”©ì„ ì‚¬ìš©í•˜ëŠ” ì¼ë¶€ í…스트를 varchar로 변환해야합니다.'', ''clob로 변환해야하는 char ì¸ì½”ë”©ì„ ê°€ì§„ í…스트'', ''char ì¸ì½”ë”©ì„ ì‚¬ìš©í•œ char í…스트'');' + - 'INSERT INTO table1 VALUES (101,''char ì¸ì½”ë”©ì„ ì‚¬ìš©í•˜ëŠ” ì¼ë¶€ í…스트를 varchar로 변환해야합니다.'', ''clob로 변환해야하는 char ì¸ì½”ë”©ì„ ê°€ì§„ í…스트'', ''char ì¸ì½”ë”©ì„ ì‚¬ìš©í•œ char í…스트'');' + +query: 'select varchar_field13, clob_field15, char_field16 from table1' + +drop: 'DROP table table1;' + +values: + - 'VARCHAR_FIELD13=char ì¸ì½”ë”©ì„ ì‚¬ìš©í•˜ëŠ” ì¼ë¶€ í…스트를 varchar로 변환해야합니다.,char ì¸ì½”ë”©ì„ ì‚¬ìš©í•˜ëŠ” ì¼ë¶€ í…스트를 varchar로 변환해야합니다.,char ì¸ì½”ë”©ì„ ì‚¬ìš©í•˜ëŠ” ì¼ë¶€ í…스트를 varchar로 변환해야합니다., + char ì¸ì½”ë”©ì„ ì‚¬ìš©í•˜ëŠ” ì¼ë¶€ í…스트를 varchar로 변환해야합니다.,char ì¸ì½”ë”©ì„ ì‚¬ìš©í•˜ëŠ” ì¼ë¶€ í…스트를 varchar로 변환해야합니다.' 
+ - 'CLOB_FIELD15=clob로 변환해야하는 char ì¸ì½”ë”©ì„ ê°€ì§„ í…스트,clob로 변환해야하는 char ì¸ì½”ë”©ì„ ê°€ì§„ í…스트,clob로 변환해야하는 char ì¸ì½”ë”©ì„ ê°€ì§„ í…스트, + clob로 변환해야하는 char ì¸ì½”ë”©ì„ ê°€ì§„ í…스트,clob로 변환해야하는 char ì¸ì½”ë”©ì„ ê°€ì§„ í…스트' + - 'CHAR_FIELD16=char ì¸ì½”ë”©ì„ ì‚¬ìš©í•œ char í…스트,char ì¸ì½”ë”©ì„ ì‚¬ìš©í•œ char í…스트,char ì¸ì½”ë”©ì„ ì‚¬ìš©í•œ char í…스트,char ì¸ì½”ë”©ì„ ì‚¬ìš©í•œ char í…스트,char ì¸ì½”ë”©ì„ ì‚¬ìš©í•œ char í…스트' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_clob_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_clob_h2.yml new file mode 100644 index 000000000000..ca9f2a41973c --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_clob_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_clob_h2' + +type: 'clob' + +vector: 'CLOB_FIELD15' + +create: 'CREATE TABLE table1 (clob_field15 CLOB);' + +data: + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to clob'');' + +query: 'select CLOB_FIELD15 from table1;' + +drop: 'DROP table table1;' + +values: + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' + - 'some text that needs to be converted to clob' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_date_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_date_h2.yml new file mode 100644 index 000000000000..de4e66b522c0 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_date_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. 
See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_date_h2' + +type: 'date' + +vector: 'DATE_FIELD10' + +create: 'CREATE TABLE table1 (date_field10 DATE);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''1969-01-01'');' + +query: 'select date_field10 from table1;' + +drop: 'DROP table table1;' + +values: + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '-365' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_decimal_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_decimal_h2.yml new file mode 100644 index 000000000000..ff2edd4fe034 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_decimal_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. 
See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_decimal_h2' + +type: 'decimal' + +vector: 'DECIMAL_FIELD6' + +create: 'CREATE TABLE table1 (decimal_field6 DECIMAL(20,2));' + +data: + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + - 'INSERT INTO table1 VALUES (17345667789.23);' + +query: 'select decimal_field6 from table1;' + +drop: 'DROP table table1;' + +values: + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' + - '17345667789.23' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_double_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_double_h2.yml new file mode 100644 index 000000000000..e7fedd88daf0 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_double_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license 
agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_double_h2' + +type: 'double' + +vector: 'DOUBLE_FIELD7' + +create: 'CREATE TABLE table1 (double_field7 DOUBLE);' + +data: + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + +query: 'select double_field7 from table1;' + +drop: 'DROP table table1;' + +values: + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' + - '56478356785.345' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_est_date_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_est_date_h2.yml new file mode 100644 index 000000000000..a6fa0fdf202b --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_est_date_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more 
contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_est_date_h2' + +type: 'est_date' + +timezone: 'EST' + +vector: 'DATE_FIELD10' + +create: 'CREATE TABLE table1 (date_field10 DATE);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + +query: 'select date_field10 from table1;' + +drop: 'DROP table table1;' + +values: + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_est_time_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_est_time_h2.yml new file mode 100644 index 000000000000..31a1137c64a5 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_est_time_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. 
See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_est_time_h2' + +type: 'est_time' + +timezone: 'EST' + +vector: 'TIME_FIELD9' + +create: 'CREATE TABLE table1 (time_field9 TIME);' + +data: + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + +query: 'select time_field9 from table1;' + +drop: 'DROP table table1;' + +values: + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' + - '63935000' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_est_timestamp_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_est_timestamp_h2.yml new file mode 100644 index 000000000000..318a2c5851cc --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_est_timestamp_h2.yml @@ -0,0 +1,49 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. 
See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_est_timestamp_h2' + +type: 'est_timestamp' + +timezone: 'EST' + +vector: 'TIMESTAMP_FIELD11' + +create: 'CREATE TABLE table1 (timestamp_field11 TIMESTAMP);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + +query: 'select timestamp_field11 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + - '1518457535000' + \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_gmt_date_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_gmt_date_h2.yml new file mode 100644 index 000000000000..66bfc6b44f71 --- /dev/null +++ 
b/java/adapter/jdbc/target/test-classes/h2/test1_gmt_date_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_gmt_date_h2' + +type: 'gmt_date' + +timezone: 'GMT' + +vector: 'DATE_FIELD10' + +create: 'CREATE TABLE table1 (date_field10 DATE);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + +query: 'select date_field10 from table1;' + +drop: 'DROP table table1;' + +values: + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_gmt_time_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_gmt_time_h2.yml new file mode 100644 index 000000000000..1a6902d45d94 --- /dev/null +++ 
b/java/adapter/jdbc/target/test-classes/h2/test1_gmt_time_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_gmt_time_h2' + +type: 'gmt_time' + +timezone: 'GMT' + +vector: 'TIME_FIELD9' + +create: 'CREATE TABLE table1 (time_field9 TIME);' + +data: + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + +query: 'select time_field9 from table1;' + +drop: 'DROP table table1;' + +values: + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_gmt_timestamp_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_gmt_timestamp_h2.yml new file mode 100644 index 000000000000..b3059a07218c --- /dev/null +++ 
b/java/adapter/jdbc/target/test-classes/h2/test1_gmt_timestamp_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_gmt_timestamp_h2' + +type: 'gmt_timestamp' + +timezone: 'GMT' + +vector: 'TIMESTAMP_FIELD11' + +create: 'CREATE TABLE table1 (timestamp_field11 TIMESTAMP);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + +query: 'select timestamp_field11 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' \ No newline at end of file diff --git 
a/java/adapter/jdbc/target/test-classes/h2/test1_int_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_int_h2.yml new file mode 100644 index 000000000000..8e8a8c4931ec --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_int_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_int_h2' + +type: 'int' + +vector: 'INT_FIELD1' + +create: 'CREATE TABLE table1 (int_field1 INT);' + +data: + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + - 'INSERT INTO table1 VALUES (101);' + +values: + - '101' + - '101' + - '101' + - '101' + - '101' + - '101' + - '101' + - '101' + - '101' + - '101' + +query: 'select int_field1 from table1;' + +drop: 'DROP table table1;' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_list_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_list_h2.yml new file mode 100644 index 000000000000..044c22182af5 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_list_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_list_h2' + +type: 'list' + +vector: 'LIST_FIELD19' + +create: 'CREATE TABLE table1 (list_field19 INT ARRAY);' + +data: + - 'INSERT INTO table1 VALUES (ARRAY[1, 2, 3]);' + - 'INSERT INTO table1 VALUES (ARRAY[1, 2, 3]);' + - 'INSERT INTO table1 VALUES (ARRAY[1, 2, 3]);' + - 'INSERT INTO table1 VALUES (ARRAY[1, 2, 3]);' + - 'INSERT INTO table1 VALUES (ARRAY[1, 2, 3]);' + - 'INSERT INTO table1 VALUES (ARRAY[1, 2, 3]);' + - 'INSERT INTO table1 VALUES (ARRAY[1, 2, 3]);' + - 'INSERT INTO table1 VALUES (ARRAY[1, 2, 3]);' + - 'INSERT INTO table1 VALUES (ARRAY[1, 2, 3]);' + - 'INSERT INTO table1 VALUES (ARRAY[1, 2, 3]);' + +query: 'select list_field19 from table1;' + +drop: 'DROP table table1;' + +values: + - '(1;2;3)' + - '(1;2;3)' + - '(1;2;3)' + - '(1;2;3)' + - '(1;2;3)' + - '(1;2;3)' + - '(1;2;3)' + - '(1;2;3)' + - '(1;2;3)' + - '(1;2;3)' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_map_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_map_h2.yml new file mode 100644 index 000000000000..a1800d20af60 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_map_h2.yml @@ -0,0 +1,33 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_map_h2' + +type: 'map' + +vector: 'MAP_FIELD20' + +create: 'CREATE TABLE table1 (map_field20 OTHER);' + +rowCount: '4' + +data: + - 'INSERT INTO table1 VALUES (X''aced00057372002e6f72672e6170616368652e6172726f772e766563746f722e7574696c2e4a736f6e537472696e67486173684d61709819d7169e7a2ecf020000787200176a6176612e7574696c2e4c696e6b6564486173684d617034c04e5c106cc0fb0200015a000b6163636573734f72646572787200116a6176612e7574696c2e486173684d61700507dac1c31660d103000246000a6c6f6164466163746f724900097468726573686f6c6478703f4000000000000c7708000000100000000374000161740001627400033132337400067177657274797400057a78637662740001217800'');' + - 'INSERT INTO table1 VALUES (X''aced00057372002e6f72672e6170616368652e6172726f772e766563746f722e7574696c2e4a736f6e537472696e67486173684d61709819d7169e7a2ecf020000787200176a6176612e7574696c2e4c696e6b6564486173684d617034c04e5c106cc0fb0200015a000b6163636573734f72646572787200116a6176612e7574696c2e486173684d61700507dac1c31660d103000246000a6c6f6164466163746f724900097468726573686f6c6478703f4000000000000c77080000001000000003740001617400016274000163740001647400033132337400067177657274797800'');' + - 'INSERT INTO table1 VALUES (X''aced00057372002e6f72672e6170616368652e6172726f772e766563746f722e7574696c2e4a736f6e537472696e67486173684d61709819d7169e7a2ecf020000787200176a6176612e7574696c2e4c696e6b6564486173684d617034c04e5c106cc0fb0200015a000b6163636573734f72646572787200116a6176612e7574696c2e486173684d61700507dac1c31660d103000246000a6c6f6164466163746f724900097468726573686f6c6478703f4000000000000c7708000000100000000174000074000576616c75657800'');' + - 'INSERT INTO table1 VALUES 
(X''aced00057372002e6f72672e6170616368652e6172726f772e766563746f722e7574696c2e4a736f6e537472696e67486173684d61709819d7169e7a2ecf020000787200176a6176612e7574696c2e4c696e6b6564486173684d617034c04e5c106cc0fb0200015a000b6163636573734f72646572787200116a6176612e7574696c2e486173684d61700507dac1c31660d103000246000a6c6f6164466163746f724900097468726573686f6c6478703f4000000000000c7708000000100000000274000b6e6f6e456d7074794b65797074000c736f6d654f746865724b65797400007800'');' + +query: 'select map_field20 from table1;' + +drop: 'DROP table table1;' + +values: + - 'MAP_FIELD20={"a":"b"|"123":"qwerty"|"zxcvb":"!"},{"a":"b"|"123":"qwerty"|"c":"d"},{"":"value"},{"nonEmptyKey":null|"someOtherKey":""}' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_null_and_notnull.yml b/java/adapter/jdbc/target/test-classes/h2/test1_null_and_notnull.yml new file mode 100644 index 000000000000..904b27783a38 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_null_and_notnull.yml @@ -0,0 +1,26 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_null_and_notnull' + +type: 'nullnotnull' + +create: 'CREATE TABLE table1 (int_field_null INT, int_field_notnull INT NOT NULL);' + +data: + - 'INSERT INTO table1 VALUES (0, 0);' + - 'INSERT INTO table1 VALUES (1, 1);' + +rowCount: '2' + +query: 'select int_field_null, int_field_notnull from table1;' + +drop: 'DROP table table1;' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_null_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_null_h2.yml new file mode 100644 index 000000000000..a33b18105a88 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_null_h2.yml @@ -0,0 +1,36 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_null_h2' + +type: 'null' + +vector: 'NULL_FIELD18' + +create: 'CREATE TABLE table1 (null_field18 NULL);' + +rowCount: '10' + +data: + - 'INSERT INTO table1 VALUES (null);' + - 'INSERT INTO table1 VALUES (null);' + - 'INSERT INTO table1 VALUES (null);' + - 'INSERT INTO table1 VALUES (null);' + - 'INSERT INTO table1 VALUES (null);' + - 'INSERT INTO table1 VALUES (null);' + - 'INSERT INTO table1 VALUES (null);' + - 'INSERT INTO table1 VALUES (null);' + - 'INSERT INTO table1 VALUES (null);' + - 'INSERT INTO table1 VALUES (null);' + +query: 'select null_field18 from table1;' + +drop: 'DROP table table1;' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_pst_date_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_pst_date_h2.yml new file mode 100644 index 000000000000..6923537c6cee --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_pst_date_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_pst_date_h2' + +type: 'pst_date' + +timezone: 'PST' + +vector: 'DATE_FIELD10' + +create: 'CREATE TABLE table1 (date_field10 DATE);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + - 'INSERT INTO table1 VALUES (''2018-02-12'');' + +query: 'select date_field10 from table1;' + +drop: 'DROP table table1;' + +values: + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' + - '17574' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_pst_time_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_pst_time_h2.yml new file mode 100644 index 000000000000..f8faaead34c5 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_pst_time_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_pst_time_h2' + +type: 'pst_time' + +timezone: 'PST' + +vector: 'TIME_FIELD9' + +create: 'CREATE TABLE table1 (time_field9 TIME);' + +data: + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + +query: 'select time_field9 from table1;' + +drop: 'DROP table table1;' + +values: + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' + - '74735000' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_pst_timestamp_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_pst_timestamp_h2.yml new file mode 100644 index 000000000000..632479fce2e9 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_pst_timestamp_h2.yml @@ -0,0 +1,48 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_pst_timestamp_h2' + +type: 'pst_timestamp' + +timezone: 'PST' + +vector: 'TIMESTAMP_FIELD11' + +create: 'CREATE TABLE table1 (timestamp_field11 TIMESTAMP);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + +query: 'select timestamp_field11 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' + - '1518468335000' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_real_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_real_h2.yml new file mode 100644 index 000000000000..8ca58c126b1b --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_real_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. 
You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_real_h2' + +type: 'real' + +vector: 'REAL_FIELD8' + +create: 'CREATE TABLE table1 (real_field8 REAL);' + +data: + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + - 'INSERT INTO table1 VALUES (56478356785.345);' + +query: 'select real_field8 from table1;' + +drop: 'DROP table table1;' + +values: + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' + - '56478356785.345f' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_selected_datatypes_null_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_selected_datatypes_null_h2.yml new file mode 100644 index 000000000000..60a4462272c7 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_selected_datatypes_null_h2.yml @@ -0,0 +1,49 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. 
The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_selected_datatypes_null_h2' + +type: 'selected_null_column' + +vectors: + - 'BIGINT_FIELD5' + - 'DECIMAL_FIELD6' + - 'DOUBLE_FIELD7' + - 'REAL_FIELD8' + - 'TIME_FIELD9' + - 'DATE_FIELD10' + - 'TIMESTAMP_FIELD11' + - 'BINARY_FIELD12' + - 'VARCHAR_FIELD13' + - 'BLOB_FIELD14' + - 'CLOB_FIELD15' + - 'CHAR_FIELD16' + - 'BIT_FIELD17' + - 'LIST_FIELD19' + - 'MAP_FIELD20' + +rowCount: '5' + +create: 'CREATE TABLE table1 (int_field1 INT, bool_field2 BOOLEAN, tinyint_field3 TINYINT, smallint_field4 SMALLINT, bigint_field5 BIGINT, + decimal_field6 DECIMAL(20,2), double_field7 DOUBLE, real_field8 REAL, time_field9 TIME, date_field10 DATE, timestamp_field11 TIMESTAMP, + binary_field12 VARBINARY(100), varchar_field13 VARCHAR(256), blob_field14 BLOB, clob_field15 CLOB, char_field16 CHAR(14), bit_field17 BIT, + list_field19 INT ARRAY, map_field20 VARCHAR(256));' + +data: + - 'INSERT INTO table1 (int_field1, bool_field2, tinyint_field3, smallint_field4) VALUES (102, 0, 46, 12001);' + - 'INSERT INTO table1 (int_field1, bool_field2, tinyint_field3, smallint_field4) VALUES (102, 0, 46, 12001);' + - 'INSERT INTO table1 (int_field1, bool_field2, tinyint_field3, smallint_field4) VALUES (102, 0, 46, 12001);' + - 'INSERT INTO table1 (int_field1, bool_field2, tinyint_field3, smallint_field4) VALUES (102, 0, 46, 12001);' + - 'INSERT INTO table1 (int_field1, bool_field2, tinyint_field3, smallint_field4) VALUES (102, 0, 46, 
12001);' + +query: 'select bigint_field5, decimal_field6, double_field7, real_field8, time_field9, date_field10, timestamp_field11, binary_field12, varchar_field13, blob_field14, clob_field15, char_field16, bit_field17, list_field19, map_field20 from table1' + +drop: 'DROP table table1;' \ No newline at end of file diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_smallint_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_smallint_h2.yml new file mode 100644 index 000000000000..2be83f8884ab --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_smallint_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_smallint_h2' + +type: 'small_int' + +vector: 'SMALLINT_FIELD4' + +create: 'CREATE TABLE table1 (smallint_field4 SMALLINT);' + +data: + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + - 'INSERT INTO table1 VALUES (12000);' + +query: 'select smallint_field4 from table1;' + +drop: 'DROP table table1;' + +values: + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' + - '12000' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_time_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_time_h2.yml new file mode 100644 index 000000000000..323c971b9c11 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_time_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_time_h2' + +type: 'time' + +vector: 'TIME_FIELD9' + +create: 'CREATE TABLE table1 (time_field9 TIME);' + +data: + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + - 'INSERT INTO table1 VALUES (''12:45:35'');' + +query: 'select time_field9 from table1;' + +drop: 'DROP table table1;' + +values: + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' + - '45935000' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_timestamp_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_timestamp_h2.yml new file mode 100644 index 000000000000..380bed20c091 --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_timestamp_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_timestamp_h2' + +type: 'timestamp' + +vector: 'TIMESTAMP_FIELD11' + +create: 'CREATE TABLE table1 (timestamp_field11 TIMESTAMP);' + +data: + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + - 'INSERT INTO table1 VALUES (''2018-02-12 12:45:35'');' + +query: 'select timestamp_field11 from table1;' + +drop: 'DROP table table1;' + +values: + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' + - '1518439535000' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_tinyint_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_tinyint_h2.yml new file mode 100644 index 000000000000..04dad785539f --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_tinyint_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. 
See the License for the specific +#language governing permissions and limitations under the License. + +name: 'test1_tinyint_h2' + +type: 'tinyint' + +vector: 'TINYINT_FIELD3' + +create: 'CREATE TABLE table1 (tinyint_field3 TINYINT);' + +data: + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + - 'INSERT INTO table1 VALUES (45);' + +query: 'select tinyint_field3 from table1;' + +drop: 'DROP table table1;' + +values: + - '45' + - '45' + - '45' + - '45' + - '45' + - '45' + - '45' + - '45' + - '45' + - '45' diff --git a/java/adapter/jdbc/target/test-classes/h2/test1_varchar_h2.yml b/java/adapter/jdbc/target/test-classes/h2/test1_varchar_h2.yml new file mode 100644 index 000000000000..6e6768002cfa --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/h2/test1_varchar_h2.yml @@ -0,0 +1,46 @@ +#Licensed to the Apache Software Foundation (ASF) under one or more contributor +#license agreements. See the NOTICE file distributed with this work for additional +#information regarding copyright ownership. The ASF licenses this file to +#You under the Apache License, Version 2.0 (the "License"); you may not use +#this file except in compliance with the License. You may obtain a copy of +#the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required +#by applicable law or agreed to in writing, software distributed under the +#License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS +#OF ANY KIND, either express or implied. See the License for the specific +#language governing permissions and limitations under the License. 
+ +name: 'test1_varchar_h2' + +type: 'varchar' + +vector: 'VARCHAR_FIELD13' + +create: 'CREATE TABLE table1 (varchar_field13 VARCHAR(256));' + +data: + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + - 'INSERT INTO table1 VALUES (''some text that needs to be converted to varchar'');' + +query: 'select varchar_field13 from table1;' + +drop: 'DROP table table1;' + +values: + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' + - 'some text that needs to be converted to varchar' diff --git a/java/adapter/jdbc/target/test-classes/logback.xml b/java/adapter/jdbc/target/test-classes/logback.xml new file mode 100644 index 000000000000..4c54d18a210f --- /dev/null +++ b/java/adapter/jdbc/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + 
diff --git a/java/adapter/orc/target/classes/arrow-git.properties b/java/adapter/orc/target/classes/arrow-git.properties new file mode 100644 index 000000000000..7e7c2de2c17c --- /dev/null +++ b/java/adapter/orc/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:19 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/algorithm/target/classes/arrow-git.properties b/java/algorithm/target/classes/arrow-git.properties new file mode 100644 index 000000000000..680f859581ab --- /dev/null +++ b/java/algorithm/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:07 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 
+git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/algorithm/target/test-classes/logback.xml b/java/algorithm/target/test-classes/logback.xml new file mode 100644 index 000000000000..4c54d18a210f --- /dev/null +++ b/java/algorithm/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + diff --git a/java/c/target/classes/arrow-git.properties b/java/c/target/classes/arrow-git.properties new file mode 100644 index 000000000000..4296c571449b --- /dev/null +++ b/java/c/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:09 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 
+git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/compression/target/classes/arrow-git.properties b/java/compression/target/classes/arrow-git.properties new file mode 100644 index 000000000000..e7224e45ae77 --- /dev/null +++ b/java/compression/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:10 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git 
+git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/compression/target/test-classes/logback.xml b/java/compression/target/test-classes/logback.xml new file mode 100644 index 000000000000..4c54d18a210f --- /dev/null +++ b/java/compression/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + diff --git a/java/dataset/target/classes/arrow-git.properties b/java/dataset/target/classes/arrow-git.properties new file mode 100644 index 000000000000..194e9a9057fc --- /dev/null +++ b/java/dataset/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:11 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/dataset/target/test-classes/avroschema/user.avsc b/java/dataset/target/test-classes/avroschema/user.avsc new file mode 100644 index 
000000000000..5a4635b6dce7 --- /dev/null +++ b/java/dataset/target/test-classes/avroschema/user.avsc @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +{ + "namespace": "org.apache.arrow.dataset", + "type": "record", + "name": "Users", + "fields": [ + {"name": "id", "type": ["int", "null"]}, + {"name": "name", "type": ["string", "null"]} + ] +} diff --git a/java/dataset/target/test-classes/substrait/local_files_users.json b/java/dataset/target/test-classes/substrait/local_files_users.json new file mode 100644 index 000000000000..a2f5af1b3b80 --- /dev/null +++ b/java/dataset/target/test-classes/substrait/local_files_users.json @@ -0,0 +1,75 @@ +{ + "extensionUris": [], + "extensions": [], + "relations": [{ + "root": { + "input": { + "project": { + "common": { + "emit": { + "outputMapping": [2, 3] + } + }, + "input": { + "read": { + "common": { + "direct": { + } + }, + "baseSchema": { + "names": ["ID", "NAME"], + "struct": { + "types": [{ + "i32": { + "typeVariationReference": 0, + "nullability": "NULLABILITY_REQUIRED" + } + }, { + "varchar": { + "length": 150, + "typeVariationReference": 0, + "nullability": "NULLABILITY_NULLABLE" + } + }], + "typeVariationReference": 0, + "nullability": 
"NULLABILITY_REQUIRED" + } + }, + "local_files": { + "items": [ + { + "uri_file": "FILENAME_PLACEHOLDER", + "parquet": {} + } + ] + } + } + }, + "expressions": [{ + "selection": { + "directReference": { + "structField": { + "field": 0 + } + }, + "rootReference": { + } + } + }, { + "selection": { + "directReference": { + "structField": { + "field": 1 + } + }, + "rootReference": { + } + } + }] + } + }, + "names": ["ID", "NAME"] + } + }], + "expectedTypeUrls": [] +} \ No newline at end of file diff --git a/java/dataset/target/test-classes/substrait/named_table_users.json b/java/dataset/target/test-classes/substrait/named_table_users.json new file mode 100644 index 000000000000..629eebd05977 --- /dev/null +++ b/java/dataset/target/test-classes/substrait/named_table_users.json @@ -0,0 +1,70 @@ +{ + "extensionUris": [], + "extensions": [], + "relations": [{ + "root": { + "input": { + "project": { + "common": { + "emit": { + "outputMapping": [2, 3] + } + }, + "input": { + "read": { + "common": { + "direct": { + } + }, + "baseSchema": { + "names": ["ID", "NAME"], + "struct": { + "types": [{ + "i32": { + "typeVariationReference": 0, + "nullability": "NULLABILITY_REQUIRED" + } + }, { + "varchar": { + "length": 150, + "typeVariationReference": 0, + "nullability": "NULLABILITY_NULLABLE" + } + }], + "typeVariationReference": 0, + "nullability": "NULLABILITY_REQUIRED" + } + }, + "namedTable": { + "names": ["USERS"] + } + } + }, + "expressions": [{ + "selection": { + "directReference": { + "structField": { + "field": 0 + } + }, + "rootReference": { + } + } + }, { + "selection": { + "directReference": { + "structField": { + "field": 1 + } + }, + "rootReference": { + } + } + }] + } + }, + "names": ["ID", "NAME"] + } + }], + "expectedTypeUrls": [] +} \ No newline at end of file diff --git a/java/flight/flight-core/target/classes/Flight.proto b/java/flight/flight-core/target/classes/Flight.proto new file mode 100644 index 000000000000..4963e8c09ae4 --- /dev/null +++ 
b/java/flight/flight-core/target/classes/Flight.proto @@ -0,0 +1,645 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; +import "google/protobuf/timestamp.proto"; + +option java_package = "org.apache.arrow.flight.impl"; +option go_package = "github.com/apache/arrow/go/arrow/flight/gen/flight"; +option csharp_namespace = "Apache.Arrow.Flight.Protocol"; + +package arrow.flight.protocol; + +/* + * A flight service is an endpoint for retrieving or storing Arrow data. A + * flight service can expose one or more predefined endpoints that can be + * accessed using the Arrow Flight Protocol. Additionally, a flight service + * can expose a set of actions that are available. + */ +service FlightService { + + /* + * Handshake between client and server. Depending on the server, the + * handshake may be required to determine the token that should be used for + * future operations. Both request and response are streams to allow multiple + * round-trips depending on auth mechanism. + */ + rpc Handshake(stream HandshakeRequest) returns (stream HandshakeResponse) {} + + /* + * Get a list of available streams given a particular criteria. Most flight + * services will expose one or more streams that are readily available for + * retrieval. This api allows listing the streams available for + * consumption. A user can also provide a criteria. The criteria can limit + * the subset of streams that can be listed via this interface. Each flight + * service allows its own definition of how to consume criteria. + */ + rpc ListFlights(Criteria) returns (stream FlightInfo) {} + + /* + * For a given FlightDescriptor, get information about how the flight can be + * consumed. 
This is a useful interface if the consumer of the interface + * already can identify the specific flight to consume. This interface can + * also allow a consumer to generate a flight stream through a specified + * descriptor. For example, a flight descriptor might be something that + * includes a SQL statement or a Pickled Python operation that will be + * executed. In those cases, the descriptor will not be previously available + * within the list of available streams provided by ListFlights but will be + * available for consumption for the duration defined by the specific flight + * service. + */ + rpc GetFlightInfo(FlightDescriptor) returns (FlightInfo) {} + + /* + * For a given FlightDescriptor, start a query and get information + * to poll its execution status. This is a useful interface if the + * query may be a long-running query. The first PollFlightInfo call + * should return as quickly as possible. (GetFlightInfo doesn't + * return until the query is complete.) + * + * A client can consume any available results before + * the query is completed. See PollInfo.info for details. + * + * A client can poll the updated query status by calling + * PollFlightInfo() with PollInfo.flight_descriptor. A server + * should not respond until the result would be different from last + * time. That way, the client can "long poll" for updates + * without constantly making requests. Clients can set a short timeout + * to avoid blocking calls if desired. + * + * A client can't use PollInfo.flight_descriptor after + * PollInfo.expiration_time passes. A server might not accept the + * retry descriptor anymore and the query may be cancelled. + * + * A client may use the CancelFlightInfo action with + * PollInfo.info to cancel the running query. + */ + rpc PollFlightInfo(FlightDescriptor) returns (PollInfo) {} + + /* + * For a given FlightDescriptor, get the Schema as described in Schema.fbs::Schema + * This is used when a consumer needs the Schema of flight stream. 
Similar to + * GetFlightInfo this interface may generate a new flight that was not previously + * available in ListFlights. + */ + rpc GetSchema(FlightDescriptor) returns (SchemaResult) {} + + /* + * Retrieve a single stream associated with a particular descriptor + * associated with the referenced ticket. A Flight can be composed of one or + * more streams where each stream can be retrieved using a separate opaque + * ticket that the flight service uses for managing a collection of streams. + */ + rpc DoGet(Ticket) returns (stream FlightData) {} + + /* + * Push a stream to the flight service associated with a particular + * flight stream. This allows a client of a flight service to upload a stream + * of data. Depending on the particular flight service, a client consumer + * could be allowed to upload a single stream per descriptor or an unlimited + * number. In the latter, the service might implement a 'seal' action that + * can be applied to a descriptor once all streams are uploaded. + */ + rpc DoPut(stream FlightData) returns (stream PutResult) {} + + /* + * Open a bidirectional data channel for a given descriptor. This + * allows clients to send and receive arbitrary Arrow data and + * application-specific metadata in a single logical stream. In + * contrast to DoGet/DoPut, this is more suited for clients + * offloading computation (rather than storage) to a Flight service. + */ + rpc DoExchange(stream FlightData) returns (stream FlightData) {} + + /* + * Flight services can support an arbitrary number of simple actions in + * addition to the possible ListFlights, GetFlightInfo, DoGet, DoPut + * operations that are potentially available. DoAction allows a flight client + * to do a specific action against a flight service. An action includes + * opaque request and response objects that are specific to the type action + * being undertaken. 
+ */ + rpc DoAction(Action) returns (stream Result) {} + + /* + * A flight service exposes all of the available action types that it has + * along with descriptions. This allows different flight consumers to + * understand the capabilities of the flight service. + */ + rpc ListActions(Empty) returns (stream ActionType) {} +} + +/* + * The request that a client provides to a server on handshake. + */ +message HandshakeRequest { + + /* + * A defined protocol version + */ + uint64 protocol_version = 1; + + /* + * Arbitrary auth/handshake info. + */ + bytes payload = 2; +} + +message HandshakeResponse { + + /* + * A defined protocol version + */ + uint64 protocol_version = 1; + + /* + * Arbitrary auth/handshake info. + */ + bytes payload = 2; +} + +/* + * A message for doing simple auth. + */ +message BasicAuth { + string username = 2; + string password = 3; +} + +message Empty {} + +/* + * Describes an available action, including both the name used for execution + * along with a short description of the purpose of the action. + */ +message ActionType { + string type = 1; + string description = 2; +} + +/* + * A service specific expression that can be used to return a limited set + * of available Arrow Flight streams. + */ +message Criteria { + bytes expression = 1; +} + +/* + * An opaque action specific for the service. + */ +message Action { + string type = 1; + bytes body = 2; +} + +/* + * The request of the CancelFlightInfo action. + * + * The request should be stored in Action.body. + */ +message CancelFlightInfoRequest { + FlightInfo info = 1; +} + +/* + * The request of the RenewFlightEndpoint action. + * + * The request should be stored in Action.body. + */ +message RenewFlightEndpointRequest { + FlightEndpoint endpoint = 1; +} + +/* + * An opaque result returned after executing an action. + */ +message Result { + bytes body = 1; +} + +/* + * The result of a cancel operation. + * + * This is used by CancelFlightInfoResult.status. 
+ */ +enum CancelStatus { + // The cancellation status is unknown. Servers should avoid using + // this value (send a NOT_FOUND error if the requested query is + // not known). Clients can retry the request. + CANCEL_STATUS_UNSPECIFIED = 0; + // The cancellation request is complete. Subsequent requests with + // the same payload may return CANCELLED or a NOT_FOUND error. + CANCEL_STATUS_CANCELLED = 1; + // The cancellation request is in progress. The client may retry + // the cancellation request. + CANCEL_STATUS_CANCELLING = 2; + // The query is not cancellable. The client should not retry the + // cancellation request. + CANCEL_STATUS_NOT_CANCELLABLE = 3; +} + +/* + * The result of the CancelFlightInfo action. + * + * The result should be stored in Result.body. + */ +message CancelFlightInfoResult { + CancelStatus status = 1; +} + +/* + * Wrap the result of a getSchema call + */ +message SchemaResult { + // The schema of the dataset in its IPC form: + // 4 bytes - an optional IPC_CONTINUATION_TOKEN prefix + // 4 bytes - the byte length of the payload + // a flatbuffer Message whose header is the Schema + bytes schema = 1; +} + +/* + * The name or tag for a Flight. May be used as a way to retrieve or generate + * a flight or be used to expose a set of previously defined flights. + */ +message FlightDescriptor { + + /* + * Describes what type of descriptor is defined. + */ + enum DescriptorType { + + // Protobuf pattern, not used. + UNKNOWN = 0; + + /* + * A named path that identifies a dataset. A path is composed of a string + * or list of strings describing a particular dataset. This is conceptually + * similar to a path inside a filesystem. + */ + PATH = 1; + + /* + * An opaque command to generate a dataset. + */ + CMD = 2; + } + + DescriptorType type = 1; + + /* + * Opaque value used to express a command. Should only be defined when + * type = CMD. + */ + bytes cmd = 2; + + /* + * List of strings identifying a particular dataset. 
Should only be defined + * when type = PATH. + */ + repeated string path = 3; +} + +/* + * The access coordinates for retrieval of a dataset. With a FlightInfo, a + * consumer is able to determine how to retrieve a dataset. + */ +message FlightInfo { + // The schema of the dataset in its IPC form: + // 4 bytes - an optional IPC_CONTINUATION_TOKEN prefix + // 4 bytes - the byte length of the payload + // a flatbuffer Message whose header is the Schema + bytes schema = 1; + + /* + * The descriptor associated with this info. + */ + FlightDescriptor flight_descriptor = 2; + + /* + * A list of endpoints associated with the flight. To consume the + * whole flight, all endpoints (and hence all Tickets) must be + * consumed. Endpoints can be consumed in any order. + * + * In other words, an application can use multiple endpoints to + * represent partitioned data. + * + * If the returned data has an ordering, an application can use + * "FlightInfo.ordered = true" or should return the all data in a + * single endpoint. Otherwise, there is no ordering defined on + * endpoints or the data within. + * + * A client can read ordered data by reading data from returned + * endpoints, in order, from front to back. + * + * Note that a client may ignore "FlightInfo.ordered = true". If an + * ordering is important for an application, an application must + * choose one of them: + * + * * An application requires that all clients must read data in + * returned endpoints order. + * * An application must return the all data in a single endpoint. + */ + repeated FlightEndpoint endpoint = 3; + + // Set these to -1 if unknown. + int64 total_records = 4; + int64 total_bytes = 5; + + /* + * FlightEndpoints are in the same order as the data. + */ + bool ordered = 6; + + /* + * Application-defined metadata. + * + * There is no inherent or required relationship between this + * and the app_metadata fields in the FlightEndpoints or resulting + * FlightData messages. 
Since this metadata is application-defined, + * a given application could define there to be a relationship, + * but there is none required by the spec. + */ + bytes app_metadata = 7; +} + +/* + * The information to process a long-running query. + */ +message PollInfo { + /* + * The currently available results. + * + * If "flight_descriptor" is not specified, the query is complete + * and "info" specifies all results. Otherwise, "info" contains + * partial query results. + * + * Note that each PollInfo response contains a complete + * FlightInfo (not just the delta between the previous and current + * FlightInfo). + * + * Subsequent PollInfo responses may only append new endpoints to + * info. + * + * Clients can begin fetching results via DoGet(Ticket) with the + * ticket in the info before the query is + * completed. FlightInfo.ordered is also valid. + */ + FlightInfo info = 1; + + /* + * The descriptor the client should use on the next try. + * If unset, the query is complete. + */ + FlightDescriptor flight_descriptor = 2; + + /* + * Query progress. If known, must be in [0.0, 1.0] but need not be + * monotonic or nondecreasing. If unknown, do not set. + */ + optional double progress = 3; + + /* + * Expiration time for this request. After this passes, the server + * might not accept the retry descriptor anymore (and the query may + * be cancelled). This may be updated on a call to PollFlightInfo. + */ + google.protobuf.Timestamp expiration_time = 4; +} + +/* + * A particular stream or split associated with a flight. + */ +message FlightEndpoint { + + /* + * Token used to retrieve this stream. + */ + Ticket ticket = 1; + + /* + * A list of URIs where this ticket can be redeemed via DoGet(). + * + * If the list is empty, the expectation is that the ticket can only + * be redeemed on the current service where the ticket was + * generated. 
+ * + * If the list is not empty, the expectation is that the ticket can be + * redeemed at any of the locations, and that the data returned will be + * equivalent. In this case, the ticket may only be redeemed at one of the + * given locations, and not (necessarily) on the current service. If one + * of the given locations is "arrow-flight-reuse-connection://?", the + * client may redeem the ticket on the service where the ticket was + * generated (i.e., the same as above), in addition to the other + * locations. (This URI was chosen to maximize compatibility, as 'scheme:' + * or 'scheme://' are not accepted by Java's java.net.URI.) + * + * In other words, an application can use multiple locations to + * represent redundant and/or load balanced services. + */ + repeated Location location = 2; + + /* + * Expiration time of this stream. If present, clients may assume + * they can retry DoGet requests. Otherwise, it is + * application-defined whether DoGet requests may be retried. + */ + google.protobuf.Timestamp expiration_time = 3; + + /* + * Application-defined metadata. + * + * There is no inherent or required relationship between this + * and the app_metadata fields in the FlightInfo or resulting + * FlightData messages. Since this metadata is application-defined, + * a given application could define there to be a relationship, + * but there is none required by the spec. + */ + bytes app_metadata = 4; +} + +/* + * A location where a Flight service will accept retrieval of a particular + * stream given a ticket. + */ +message Location { + string uri = 1; +} + +/* + * An opaque identifier that the service can use to retrieve a particular + * portion of a stream. + * + * Tickets are meant to be single use. It is an error/application-defined + * behavior to reuse a ticket. + */ +message Ticket { + bytes ticket = 1; +} + +/* + * A batch of Arrow data as part of a stream of batches. + */ +message FlightData { + + /* + * The descriptor of the data. 
This is only relevant when a client is + * starting a new DoPut stream. + */ + FlightDescriptor flight_descriptor = 1; + + /* + * Header for message data as described in Message.fbs::Message. + */ + bytes data_header = 2; + + /* + * Application-defined metadata. + */ + bytes app_metadata = 3; + + /* + * The actual batch of Arrow data. Preferably handled with minimal-copies + * coming last in the definition to help with sidecar patterns (it is + * expected that some implementations will fetch this field off the wire + * with specialized code to avoid extra memory copies). + */ + bytes data_body = 1000; +} + +/** + * The response message associated with the submission of a DoPut. + */ +message PutResult { + bytes app_metadata = 1; +} + +/* + * EXPERIMENTAL: Union of possible value types for a Session Option to be set to. + * + * By convention, an attempt to set a valueless SessionOptionValue should + * attempt to unset or clear the named option value on the server. + */ +message SessionOptionValue { + message StringListValue { + repeated string values = 1; + } + + oneof option_value { + string string_value = 1; + bool bool_value = 2; + sfixed64 int64_value = 3; + double double_value = 4; + StringListValue string_list_value = 5; + } +} + +/* + * EXPERIMENTAL: A request to set session options for an existing or new (implicit) + * server session. + * + * Sessions are persisted and referenced via a transport-level state management, typically + * RFC 6265 HTTP cookies when using an HTTP transport. The suggested cookie name or state + * context key is 'arrow_flight_session_id', although implementations may freely choose their + * own name. + * + * Session creation (if one does not already exist) is implied by this RPC request, however + * server implementations may choose to initiate a session that also contains client-provided + * session options at any other time, e.g. 
on authentication, or when any other call is made + * and the server wishes to use a session to persist any state (or lack thereof). + */ +message SetSessionOptionsRequest { + map session_options = 1; +} + +/* + * EXPERIMENTAL: The results (individually) of setting a set of session options. + * + * Option names should only be present in the response if they were not successfully + * set on the server; that is, a response without an Error for a name provided in the + * SetSessionOptionsRequest implies that the named option value was set successfully. + */ +message SetSessionOptionsResult { + enum ErrorValue { + // Protobuf deserialization fallback value: The status is unknown or unrecognized. + // Servers should avoid using this value. The request may be retried by the client. + UNSPECIFIED = 0; + // The given session option name is invalid. + INVALID_NAME = 1; + // The session option value or type is invalid. + INVALID_VALUE = 2; + // The session option cannot be set. + ERROR = 3; + } + + message Error { + ErrorValue value = 1; + } + + map errors = 1; +} + +/* + * EXPERIMENTAL: A request to access the session options for the current server session. + * + * The existing session is referenced via a cookie header or similar (see + * SetSessionOptionsRequest above); it is an error to make this request with a missing, + * invalid, or expired session cookie header or other implementation-defined session + * reference token. + */ +message GetSessionOptionsRequest { +} + +/* + * EXPERIMENTAL: The result containing the current server session options. + */ +message GetSessionOptionsResult { + map session_options = 1; +} + +/* + * Request message for the "Close Session" action. + * + * The exiting session is referenced via a cookie header. + */ +message CloseSessionRequest { +} + +/* + * The result of closing a session. + */ +message CloseSessionResult { + enum Status { + // Protobuf deserialization fallback value: The session close status is unknown or + // not recognized. 
Servers should avoid using this value (send a NOT_FOUND error if + // the requested session is not known or expired). Clients can retry the request. + UNSPECIFIED = 0; + // The session close request is complete. Subsequent requests with + // the same session produce a NOT_FOUND error. + CLOSED = 1; + // The session close request is in progress. The client may retry + // the close request. + CLOSING = 2; + // The session is not closeable. The client should not retry the + // close request. + NOT_CLOSEABLE = 3; + } + + Status status = 1; +} diff --git a/java/flight/flight-core/target/classes/FlightSql.proto b/java/flight/flight-core/target/classes/FlightSql.proto new file mode 100644 index 000000000000..3282ee4f4730 --- /dev/null +++ b/java/flight/flight-core/target/classes/FlightSql.proto @@ -0,0 +1,1881 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; +import "google/protobuf/descriptor.proto"; + +option java_package = "org.apache.arrow.flight.sql.impl"; +option go_package = "github.com/apache/arrow/go/arrow/flight/gen/flight"; +package arrow.flight.protocol.sql; + +/* + * Represents a metadata request. Used in the command member of FlightDescriptor + * for the following RPC calls: + * - GetSchema: return the Arrow schema of the query. + * - GetFlightInfo: execute the metadata request. + * + * The returned Arrow schema will be: + * < + * info_name: uint32 not null, + * value: dense_union< + * string_value: utf8, + * bool_value: bool, + * bigint_value: int64, + * int32_bitmask: int32, + * string_list: list + * int32_to_int32_list_map: map> + * > + * where there is one row per requested piece of metadata information. + */ +message CommandGetSqlInfo { + option (experimental) = true; + + /* + * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide + * Flight SQL clients with basic, SQL syntax and SQL functions related information. + * More information types can be added in future releases. + * E.g. more SQL syntax support types, scalar functions support, type conversion support etc. + * + * Note that the set of metadata may expand. + * + * Initially, Flight SQL will support the following information types: + * - Server Information - Range [0-500) + * - Syntax Information - Range [500-1000) + * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options). + * Custom options should start at 10,000. + * + * If omitted, then all metadata will be retrieved. 
+ * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must + * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use. + * If additional metadata is included, the metadata IDs should start from 10,000. + */ + repeated uint32 info = 1; +} + +// Options for CommandGetSqlInfo. +enum SqlInfo { + + // Server Information [0-500): Provides basic information about the Flight SQL Server. + + // Retrieves a UTF-8 string with the name of the Flight SQL Server. + FLIGHT_SQL_SERVER_NAME = 0; + + // Retrieves a UTF-8 string with the native version of the Flight SQL Server. + FLIGHT_SQL_SERVER_VERSION = 1; + + // Retrieves a UTF-8 string with the Arrow format version of the Flight SQL Server. + FLIGHT_SQL_SERVER_ARROW_VERSION = 2; + + /* + * Retrieves a boolean value indicating whether the Flight SQL Server is read only. + * + * Returns: + * - false: if read-write + * - true: if read only + */ + FLIGHT_SQL_SERVER_READ_ONLY = 3; + + /* + * Retrieves a boolean value indicating whether the Flight SQL Server supports executing + * SQL queries. + * + * Note that the absence of this info (as opposed to a false value) does not necessarily + * mean that SQL is not supported, as this property was not originally defined. + */ + FLIGHT_SQL_SERVER_SQL = 4; + + /* + * Retrieves a boolean value indicating whether the Flight SQL Server supports executing + * Substrait plans. + */ + FLIGHT_SQL_SERVER_SUBSTRAIT = 5; + + /* + * Retrieves a string value indicating the minimum supported Substrait version, or null + * if Substrait is not supported. + */ + FLIGHT_SQL_SERVER_SUBSTRAIT_MIN_VERSION = 6; + + /* + * Retrieves a string value indicating the maximum supported Substrait version, or null + * if Substrait is not supported. 
+ */ + FLIGHT_SQL_SERVER_SUBSTRAIT_MAX_VERSION = 7; + + /* + * Retrieves an int32 indicating whether the Flight SQL Server supports the + * BeginTransaction/EndTransaction/BeginSavepoint/EndSavepoint actions. + * + * Even if this is not supported, the database may still support explicit "BEGIN + * TRANSACTION"/"COMMIT" SQL statements (see SQL_TRANSACTIONS_SUPPORTED); this property + * is only about whether the server implements the Flight SQL API endpoints. + * + * The possible values are listed in `SqlSupportedTransaction`. + */ + FLIGHT_SQL_SERVER_TRANSACTION = 8; + + /* + * Retrieves a boolean value indicating whether the Flight SQL Server supports explicit + * query cancellation (the CancelQuery action). + */ + FLIGHT_SQL_SERVER_CANCEL = 9; + + /* + * Retrieves an int32 indicating the timeout (in milliseconds) for prepared statement handles. + * + * If 0, there is no timeout. Servers should reset the timeout when the handle is used in a command. + */ + FLIGHT_SQL_SERVER_STATEMENT_TIMEOUT = 100; + + /* + * Retrieves an int32 indicating the timeout (in milliseconds) for transactions, since transactions are not tied to a connection. + * + * If 0, there is no timeout. Servers should reset the timeout when the handle is used in a command. + */ + FLIGHT_SQL_SERVER_TRANSACTION_TIMEOUT = 101; + + // SQL Syntax Information [500-1000): provides information about SQL syntax supported by the Flight SQL Server. + + /* + * Retrieves a boolean value indicating whether the Flight SQL Server supports CREATE and DROP of catalogs. + * + * Returns: + * - false: if it doesn't support CREATE and DROP of catalogs. + * - true: if it supports CREATE and DROP of catalogs. + */ + SQL_DDL_CATALOG = 500; + + /* + * Retrieves a boolean value indicating whether the Flight SQL Server supports CREATE and DROP of schemas. + * + * Returns: + * - false: if it doesn't support CREATE and DROP of schemas. + * - true: if it supports CREATE and DROP of schemas. 
+ */ + SQL_DDL_SCHEMA = 501; + + /* + * Indicates whether the Flight SQL Server supports CREATE and DROP of tables. + * + * Returns: + * - false: if it doesn't support CREATE and DROP of tables. + * - true: if it supports CREATE and DROP of tables. + */ + SQL_DDL_TABLE = 502; + + /* + * Retrieves a int32 ordinal representing the case sensitivity of catalog, table, schema and table names. + * + * The possible values are listed in `arrow.flight.protocol.sql.SqlSupportedCaseSensitivity`. + */ + SQL_IDENTIFIER_CASE = 503; + + // Retrieves a UTF-8 string with the supported character(s) used to surround a delimited identifier. + SQL_IDENTIFIER_QUOTE_CHAR = 504; + + /* + * Retrieves a int32 describing the case sensitivity of quoted identifiers. + * + * The possible values are listed in `arrow.flight.protocol.sql.SqlSupportedCaseSensitivity`. + */ + SQL_QUOTED_IDENTIFIER_CASE = 505; + + /* + * Retrieves a boolean value indicating whether all tables are selectable. + * + * Returns: + * - false: if not all tables are selectable or if none are; + * - true: if all tables are selectable. + */ + SQL_ALL_TABLES_ARE_SELECTABLE = 506; + + /* + * Retrieves the null ordering. + * + * Returns a int32 ordinal for the null ordering being used, as described in + * `arrow.flight.protocol.sql.SqlNullOrdering`. + */ + SQL_NULL_ORDERING = 507; + + // Retrieves a UTF-8 string list with values of the supported keywords. + SQL_KEYWORDS = 508; + + // Retrieves a UTF-8 string list with values of the supported numeric functions. + SQL_NUMERIC_FUNCTIONS = 509; + + // Retrieves a UTF-8 string list with values of the supported string functions. + SQL_STRING_FUNCTIONS = 510; + + // Retrieves a UTF-8 string list with values of the supported system functions. + SQL_SYSTEM_FUNCTIONS = 511; + + // Retrieves a UTF-8 string list with values of the supported datetime functions. + SQL_DATETIME_FUNCTIONS = 512; + + /* + * Retrieves the UTF-8 string that can be used to escape wildcard characters. 
+ * This is the string that can be used to escape '_' or '%' in the catalog search parameters that are a pattern + * (and therefore use one of the wildcard characters). + * The '_' character represents any single character; the '%' character represents any sequence of zero or more + * characters. + */ + SQL_SEARCH_STRING_ESCAPE = 513; + + /* + * Retrieves a UTF-8 string with all the "extra" characters that can be used in unquoted identifier names + * (those beyond a-z, A-Z, 0-9 and _). + */ + SQL_EXTRA_NAME_CHARACTERS = 514; + + /* + * Retrieves a boolean value indicating whether column aliasing is supported. + * If so, the SQL AS clause can be used to provide names for computed columns or to provide alias names for columns + * as required. + * + * Returns: + * - false: if column aliasing is unsupported; + * - true: if column aliasing is supported. + */ + SQL_SUPPORTS_COLUMN_ALIASING = 515; + + /* + * Retrieves a boolean value indicating whether concatenations between null and non-null values being + * null are supported. + * + * - Returns: + * - false: if concatenations between null and non-null values being null are unsupported; + * - true: if concatenations between null and non-null values being null are supported. + */ + SQL_NULL_PLUS_NULL_IS_NULL = 516; + + /* + * Retrieves a map where the key is the type to convert from and the value is a list with the types to convert to, + * indicating the supported conversions. Each key and each item on the list value is a value to a predefined type on + * SqlSupportsConvert enum. + * The returned map will be: map> + */ + SQL_SUPPORTS_CONVERT = 517; + + /* + * Retrieves a boolean value indicating whether, when table correlation names are supported, + * they are restricted to being different from the names of the tables. + * + * Returns: + * - false: if table correlation names are unsupported; + * - true: if table correlation names are supported. 
+ */ + SQL_SUPPORTS_TABLE_CORRELATION_NAMES = 518; + + /* + * Retrieves a boolean value indicating whether, when table correlation names are supported, + * they are restricted to being different from the names of the tables. + * + * Returns: + * - false: if different table correlation names are unsupported; + * - true: if different table correlation names are supported + */ + SQL_SUPPORTS_DIFFERENT_TABLE_CORRELATION_NAMES = 519; + + /* + * Retrieves a boolean value indicating whether expressions in ORDER BY lists are supported. + * + * Returns: + * - false: if expressions in ORDER BY are unsupported; + * - true: if expressions in ORDER BY are supported; + */ + SQL_SUPPORTS_EXPRESSIONS_IN_ORDER_BY = 520; + + /* + * Retrieves a boolean value indicating whether using a column that is not in the SELECT statement in a GROUP BY + * clause is supported. + * + * Returns: + * - false: if using a column that is not in the SELECT statement in a GROUP BY clause is unsupported; + * - true: if using a column that is not in the SELECT statement in a GROUP BY clause is supported. + */ + SQL_SUPPORTS_ORDER_BY_UNRELATED = 521; + + /* + * Retrieves the supported GROUP BY commands; + * + * Returns an int32 bitmask value representing the supported commands. + * The returned bitmask should be parsed in order to retrieve the supported commands. + * + * For instance: + * - return 0 (\b0) => [] (GROUP BY is unsupported); + * - return 1 (\b1) => [SQL_GROUP_BY_UNRELATED]; + * - return 2 (\b10) => [SQL_GROUP_BY_BEYOND_SELECT]; + * - return 3 (\b11) => [SQL_GROUP_BY_UNRELATED, SQL_GROUP_BY_BEYOND_SELECT]. + * Valid GROUP BY types are described under `arrow.flight.protocol.sql.SqlSupportedGroupBy`. + */ + SQL_SUPPORTED_GROUP_BY = 522; + + /* + * Retrieves a boolean value indicating whether specifying a LIKE escape clause is supported. + * + * Returns: + * - false: if specifying a LIKE escape clause is unsupported; + * - true: if specifying a LIKE escape clause is supported. 
+ */ + SQL_SUPPORTS_LIKE_ESCAPE_CLAUSE = 523; + + /* + * Retrieves a boolean value indicating whether columns may be defined as non-nullable. + * + * Returns: + * - false: if columns cannot be defined as non-nullable; + * - true: if columns may be defined as non-nullable. + */ + SQL_SUPPORTS_NON_NULLABLE_COLUMNS = 524; + + /* + * Retrieves the supported SQL grammar level as per the ODBC specification. + * + * Returns an int32 bitmask value representing the supported SQL grammar level. + * The returned bitmask should be parsed in order to retrieve the supported grammar levels. + * + * For instance: + * - return 0 (\b0) => [] (SQL grammar is unsupported); + * - return 1 (\b1) => [SQL_MINIMUM_GRAMMAR]; + * - return 2 (\b10) => [SQL_CORE_GRAMMAR]; + * - return 3 (\b11) => [SQL_MINIMUM_GRAMMAR, SQL_CORE_GRAMMAR]; + * - return 4 (\b100) => [SQL_EXTENDED_GRAMMAR]; + * - return 5 (\b101) => [SQL_MINIMUM_GRAMMAR, SQL_EXTENDED_GRAMMAR]; + * - return 6 (\b110) => [SQL_CORE_GRAMMAR, SQL_EXTENDED_GRAMMAR]; + * - return 7 (\b111) => [SQL_MINIMUM_GRAMMAR, SQL_CORE_GRAMMAR, SQL_EXTENDED_GRAMMAR]. + * Valid SQL grammar levels are described under `arrow.flight.protocol.sql.SupportedSqlGrammar`. + */ + SQL_SUPPORTED_GRAMMAR = 525; + + /* + * Retrieves the supported ANSI92 SQL grammar level. + * + * Returns an int32 bitmask value representing the supported ANSI92 SQL grammar level. + * The returned bitmask should be parsed in order to retrieve the supported commands. + * + * For instance: + * - return 0 (\b0) => [] (ANSI92 SQL grammar is unsupported); + * - return 1 (\b1) => [ANSI92_ENTRY_SQL]; + * - return 2 (\b10) => [ANSI92_INTERMEDIATE_SQL]; + * - return 3 (\b11) => [ANSI92_ENTRY_SQL, ANSI92_INTERMEDIATE_SQL]; + * - return 4 (\b100) => [ANSI92_FULL_SQL]; + * - return 5 (\b101) => [ANSI92_ENTRY_SQL, ANSI92_FULL_SQL]; + * - return 6 (\b110) => [ANSI92_INTERMEDIATE_SQL, ANSI92_FULL_SQL]; + * - return 7 (\b111) => [ANSI92_ENTRY_SQL, ANSI92_INTERMEDIATE_SQL, ANSI92_FULL_SQL]. 
+ * Valid ANSI92 SQL grammar levels are described under `arrow.flight.protocol.sql.SupportedAnsi92SqlGrammarLevel`. + */ + SQL_ANSI92_SUPPORTED_LEVEL = 526; + + /* + * Retrieves a boolean value indicating whether the SQL Integrity Enhancement Facility is supported. + * + * Returns: + * - false: if the SQL Integrity Enhancement Facility is supported; + * - true: if the SQL Integrity Enhancement Facility is supported. + */ + SQL_SUPPORTS_INTEGRITY_ENHANCEMENT_FACILITY = 527; + + /* + * Retrieves the support level for SQL OUTER JOINs. + * + * Returns a int32 ordinal for the SQL ordering being used, as described in + * `arrow.flight.protocol.sql.SqlOuterJoinsSupportLevel`. + */ + SQL_OUTER_JOINS_SUPPORT_LEVEL = 528; + + // Retrieves a UTF-8 string with the preferred term for "schema". + SQL_SCHEMA_TERM = 529; + + // Retrieves a UTF-8 string with the preferred term for "procedure". + SQL_PROCEDURE_TERM = 530; + + /* + * Retrieves a UTF-8 string with the preferred term for "catalog". + * If a empty string is returned its assumed that the server does NOT supports catalogs. + */ + SQL_CATALOG_TERM = 531; + + /* + * Retrieves a boolean value indicating whether a catalog appears at the start of a fully qualified table name. + * + * - false: if a catalog does not appear at the start of a fully qualified table name; + * - true: if a catalog appears at the start of a fully qualified table name. + */ + SQL_CATALOG_AT_START = 532; + + /* + * Retrieves the supported actions for a SQL schema. + * + * Returns an int32 bitmask value representing the supported actions for a SQL schema. + * The returned bitmask should be parsed in order to retrieve the supported actions for a SQL schema. 
+ * + * For instance: + * - return 0 (\b0) => [] (no supported actions for SQL schema); + * - return 1 (\b1) => [SQL_ELEMENT_IN_PROCEDURE_CALLS]; + * - return 2 (\b10) => [SQL_ELEMENT_IN_INDEX_DEFINITIONS]; + * - return 3 (\b11) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS]; + * - return 4 (\b100) => [SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS]; + * - return 5 (\b101) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS]; + * - return 6 (\b110) => [SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS]; + * - return 7 (\b111) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS]. + * Valid actions for a SQL schema are described under `arrow.flight.protocol.sql.SqlSupportedElementActions`. + */ + SQL_SCHEMAS_SUPPORTED_ACTIONS = 533; + + /* + * Retrieves the supported actions for a SQL catalog. + * + * Returns an int32 bitmask value representing the supported actions for a SQL catalog. + * The returned bitmask should be parsed in order to retrieve the supported actions for a SQL catalog. + * + * For instance: + * - return 0 (\b0) => [] (no supported actions for SQL catalog); + * - return 1 (\b1) => [SQL_ELEMENT_IN_PROCEDURE_CALLS]; + * - return 2 (\b10) => [SQL_ELEMENT_IN_INDEX_DEFINITIONS]; + * - return 3 (\b11) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS]; + * - return 4 (\b100) => [SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS]; + * - return 5 (\b101) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS]; + * - return 6 (\b110) => [SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS]; + * - return 7 (\b111) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS]. + * Valid actions for a SQL catalog are described under `arrow.flight.protocol.sql.SqlSupportedElementActions`.
+ */ + SQL_CATALOGS_SUPPORTED_ACTIONS = 534; + + /* + * Retrieves the supported SQL positioned commands. + * + * Returns an int32 bitmask value representing the supported SQL positioned commands. + * The returned bitmask should be parsed in order to retrieve the supported SQL positioned commands. + * + * For instance: + * - return 0 (\b0) => [] (no supported SQL positioned commands); + * - return 1 (\b1) => [SQL_POSITIONED_DELETE]; + * - return 2 (\b10) => [SQL_POSITIONED_UPDATE]; + * - return 3 (\b11) => [SQL_POSITIONED_DELETE, SQL_POSITIONED_UPDATE]. + * Valid SQL positioned commands are described under `arrow.flight.protocol.sql.SqlSupportedPositionedCommands`. + */ + SQL_SUPPORTED_POSITIONED_COMMANDS = 535; + + /* + * Retrieves a boolean value indicating whether SELECT FOR UPDATE statements are supported. + * + * Returns: + * - false: if SELECT FOR UPDATE statements are unsupported; + * - true: if SELECT FOR UPDATE statements are supported. + */ + SQL_SELECT_FOR_UPDATE_SUPPORTED = 536; + + /* + * Retrieves a boolean value indicating whether stored procedure calls that use the stored procedure escape syntax + * are supported. + * + * Returns: + * - false: if stored procedure calls that use the stored procedure escape syntax are unsupported; + * - true: if stored procedure calls that use the stored procedure escape syntax are supported. + */ + SQL_STORED_PROCEDURES_SUPPORTED = 537; + + /* + * Retrieves the supported SQL subqueries. + * + * Returns an int32 bitmask value representing the supported SQL subqueries. + * The returned bitmask should be parsed in order to retrieve the supported SQL subqueries. 
+ * + * For instance: + * - return 0 (\b0) => [] (no supported SQL subqueries); + * - return 1 (\b1) => [SQL_SUBQUERIES_IN_COMPARISONS]; + * - return 2 (\b10) => [SQL_SUBQUERIES_IN_EXISTS]; + * - return 3 (\b11) => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS]; + * - return 4 (\b100) => [SQL_SUBQUERIES_IN_INS]; + * - return 5 (\b101) => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_INS]; + * - return 6 (\b110) => [SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_EXISTS]; + * - return 7 (\b111) => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS]; + * - return 8 (\b1000) => [SQL_SUBQUERIES_IN_QUANTIFIEDS]; + * - return 9 (\b1001) => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_QUANTIFIEDS]; + * - return 10 (\b1010) => [SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_QUANTIFIEDS]; + * - return 11 (\b1011) => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_QUANTIFIEDS]; + * - return 12 (\b1100) => [SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS]; + * - return 13 (\b1101) => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS]; + * - return 14 (\b1110) => [SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS]; + * - return 15 (\b1111) => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS]; + * - ... + * Valid SQL subqueries are described under `arrow.flight.protocol.sql.SqlSupportedSubqueries`. + */ + SQL_SUPPORTED_SUBQUERIES = 538; + + /* + * Retrieves a boolean value indicating whether correlated subqueries are supported. + * + * Returns: + * - false: if correlated subqueries are unsupported; + * - true: if correlated subqueries are supported. + */ + SQL_CORRELATED_SUBQUERIES_SUPPORTED = 539; + + /* + * Retrieves the supported SQL UNIONs. + * + * Returns an int32 bitmask value representing the supported SQL UNIONs. 
+ * The returned bitmask should be parsed in order to retrieve the supported SQL UNIONs. + * + * For instance: + * - return 0 (\b0) => [] (no supported SQL unions); + * - return 1 (\b1) => [SQL_UNION]; + * - return 2 (\b10) => [SQL_UNION_ALL]; + * - return 3 (\b11) => [SQL_UNION, SQL_UNION_ALL]. + * Valid SQL unions are described under `arrow.flight.protocol.sql.SqlSupportedUnions`. + */ + SQL_SUPPORTED_UNIONS = 540; + + // Retrieves a int64 value representing the maximum number of hex characters allowed in an inline binary literal. + SQL_MAX_BINARY_LITERAL_LENGTH = 541; + + // Retrieves a int64 value representing the maximum number of characters allowed for a character literal. + SQL_MAX_CHAR_LITERAL_LENGTH = 542; + + // Retrieves a int64 value representing the maximum number of characters allowed for a column name. + SQL_MAX_COLUMN_NAME_LENGTH = 543; + + // Retrieves a int64 value representing the maximum number of columns allowed in a GROUP BY clause. + SQL_MAX_COLUMNS_IN_GROUP_BY = 544; + + // Retrieves a int64 value representing the maximum number of columns allowed in an index. + SQL_MAX_COLUMNS_IN_INDEX = 545; + + // Retrieves a int64 value representing the maximum number of columns allowed in an ORDER BY clause. + SQL_MAX_COLUMNS_IN_ORDER_BY = 546; + + // Retrieves a int64 value representing the maximum number of columns allowed in a SELECT list. + SQL_MAX_COLUMNS_IN_SELECT = 547; + + // Retrieves a int64 value representing the maximum number of columns allowed in a table. + SQL_MAX_COLUMNS_IN_TABLE = 548; + + // Retrieves a int64 value representing the maximum number of concurrent connections possible. + SQL_MAX_CONNECTIONS = 549; + + // Retrieves a int64 value representing the maximum number of characters allowed in a cursor name. + SQL_MAX_CURSOR_NAME_LENGTH = 550; + + /* + * Retrieves a int64 value representing the maximum number of bytes allowed for an index, + * including all of the parts of the index.
+ */ + SQL_MAX_INDEX_LENGTH = 551; + + // Retrieves a int64 value representing the maximum number of characters allowed in a schema name. + SQL_DB_SCHEMA_NAME_LENGTH = 552; + + // Retrieves a int64 value representing the maximum number of characters allowed in a procedure name. + SQL_MAX_PROCEDURE_NAME_LENGTH = 553; + + // Retrieves a int64 value representing the maximum number of characters allowed in a catalog name. + SQL_MAX_CATALOG_NAME_LENGTH = 554; + + // Retrieves a int64 value representing the maximum number of bytes allowed in a single row. + SQL_MAX_ROW_SIZE = 555; + + /* + * Retrieves a boolean indicating whether the return value for the JDBC method getMaxRowSize includes the SQL + * data types LONGVARCHAR and LONGVARBINARY. + * + * Returns: + * - false: if return value for the JDBC method getMaxRowSize does + * not include the SQL data types LONGVARCHAR and LONGVARBINARY; + * - true: if return value for the JDBC method getMaxRowSize includes + * the SQL data types LONGVARCHAR and LONGVARBINARY. + */ + SQL_MAX_ROW_SIZE_INCLUDES_BLOBS = 556; + + /* + * Retrieves a int64 value representing the maximum number of characters allowed for an SQL statement; + * a result of 0 (zero) means that there is no limit or the limit is not known. + */ + SQL_MAX_STATEMENT_LENGTH = 557; + + // Retrieves a int64 value representing the maximum number of active statements that can be open at the same time. + SQL_MAX_STATEMENTS = 558; + + // Retrieves a int64 value representing the maximum number of characters allowed in a table name. + SQL_MAX_TABLE_NAME_LENGTH = 559; + + // Retrieves a int64 value representing the maximum number of tables allowed in a SELECT statement. + SQL_MAX_TABLES_IN_SELECT = 560; + + // Retrieves a int64 value representing the maximum number of characters allowed in a user name. 
+ SQL_MAX_USERNAME_LENGTH = 561; + + /* + * Retrieves this database's default transaction isolation level as described in + * `arrow.flight.protocol.sql.SqlTransactionIsolationLevel`. + * + * Returns a int32 ordinal for the SQL transaction isolation level. + */ + SQL_DEFAULT_TRANSACTION_ISOLATION = 562; + + /* + * Retrieves a boolean value indicating whether transactions are supported. If not, invoking the method commit is a + * noop, and the isolation level is `arrow.flight.protocol.sql.SqlTransactionIsolationLevel.TRANSACTION_NONE`. + * + * Returns: + * - false: if transactions are unsupported; + * - true: if transactions are supported. + */ + SQL_TRANSACTIONS_SUPPORTED = 563; + + /* + * Retrieves the supported transactions isolation levels. + * + * Returns an int32 bitmask value representing the supported transactions isolation levels. + * The returned bitmask should be parsed in order to retrieve the supported transactions isolation levels. + * + * For instance: + * - return 0 (\b0) => [] (no supported SQL transactions isolation levels); + * - return 1 (\b1) => [SQL_TRANSACTION_NONE]; + * - return 2 (\b10) => [SQL_TRANSACTION_READ_UNCOMMITTED]; + * - return 3 (\b11) => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED]; + * - return 4 (\b100) => [SQL_TRANSACTION_READ_COMMITTED]; + * - return 5 (\b101) => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_COMMITTED]; + * - return 6 (\b110) => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_READ_COMMITTED]; + * - return 7 (\b111) => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_READ_COMMITTED]; + * - return 8 (\b1000) => [SQL_TRANSACTION_REPEATABLE_READ]; + * - return 9 (\b1001) => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_REPEATABLE_READ]; + * - return 10 (\b1010) => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ]; + * - return 11 (\b1011) => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ]; + * - return 12 (\b1100) =>
[SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ]; + * - return 13 (\b1101) => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ]; + * - return 14 (\b1110) => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ]; + * - return 15 (\b1111) => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ]; + * - return 16 (\b10000) => [SQL_TRANSACTION_SERIALIZABLE]; + * - ... + * Valid SQL transaction isolation levels are described under `arrow.flight.protocol.sql.SqlTransactionIsolationLevel`. + */ + SQL_SUPPORTED_TRANSACTIONS_ISOLATION_LEVELS = 564; + + /* + * Retrieves a boolean value indicating whether a data definition statement within a transaction forces + * the transaction to commit. + * + * Returns: + * - false: if a data definition statement within a transaction does not force the transaction to commit; + * - true: if a data definition statement within a transaction forces the transaction to commit. + */ + SQL_DATA_DEFINITION_CAUSES_TRANSACTION_COMMIT = 565; + + /* + * Retrieves a boolean value indicating whether a data definition statement within a transaction is ignored. + * + * Returns: + * - false: if a data definition statement within a transaction is taken into account; + * - true: if a data definition statement within a transaction is ignored. + */ + SQL_DATA_DEFINITIONS_IN_TRANSACTIONS_IGNORED = 566; + + /* + * Retrieves an int32 bitmask value representing the supported result set types. + * The returned bitmask should be parsed in order to retrieve the supported result set types.
+ * + * For instance: + * - return 0 (\b0) => [] (no supported result set types); + * - return 1 (\b1) => [SQL_RESULT_SET_TYPE_UNSPECIFIED]; + * - return 2 (\b10) => [SQL_RESULT_SET_TYPE_FORWARD_ONLY]; + * - return 3 (\b11) => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_FORWARD_ONLY]; + * - return 4 (\b100) => [SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE]; + * - return 5 (\b101) => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE]; + * - return 6 (\b110) => [SQL_RESULT_SET_TYPE_FORWARD_ONLY, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE]; + * - return 7 (\b111) => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_FORWARD_ONLY, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE]; + * - return 8 (\b1000) => [SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE]; + * - ... + * Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetType`. + */ + SQL_SUPPORTED_RESULT_SET_TYPES = 567; + + /* + * Returns an int32 bitmask value concurrency types supported for + * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_UNSPECIFIED`. + * + * For instance: + * - return 0 (\b0) => [] (no supported concurrency types for this result set type) + * - return 1 (\b1) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED] + * - return 2 (\b10) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY] + * - return 3 (\b11) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY] + * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 6 (\b110) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 7 (\b111) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`. 
+ */ + SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_UNSPECIFIED = 568; + + /* + * Returns an int32 bitmask value concurrency types supported for + * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_FORWARD_ONLY`. + * + * For instance: + * - return 0 (\b0) => [] (no supported concurrency types for this result set type) + * - return 1 (\b1) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED] + * - return 2 (\b10) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY] + * - return 3 (\b11) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY] + * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 6 (\b110) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 7 (\b111) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`. + */ + SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_FORWARD_ONLY = 569; + + /* + * Returns an int32 bitmask value concurrency types supported for + * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE`. 
+ * + * For instance: + * - return 0 (\b0) => [] (no supported concurrency types for this result set type) + * - return 1 (\b1) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED] + * - return 2 (\b10) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY] + * - return 3 (\b11) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY] + * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 6 (\b110) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 7 (\b111) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`. + */ + SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_SENSITIVE = 570; + + /* + * Returns an int32 bitmask value concurrency types supported for + * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE`. + * + * For instance: + * - return 0 (\b0) => [] (no supported concurrency types for this result set type) + * - return 1 (\b1) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED] + * - return 2 (\b10) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY] + * - return 3 (\b11) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY] + * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 6 (\b110) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * - return 7 (\b111) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE] + * Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`. 
+ */ + SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_INSENSITIVE = 571; + + /* + * Retrieves a boolean value indicating whether this database supports batch updates. + * + * - false: if this database does not support batch updates; + * - true: if this database supports batch updates. + */ + SQL_BATCH_UPDATES_SUPPORTED = 572; + + /* + * Retrieves a boolean value indicating whether this database supports savepoints. + * + * Returns: + * - false: if this database does not support savepoints; + * - true: if this database supports savepoints. + */ + SQL_SAVEPOINTS_SUPPORTED = 573; + + /* + * Retrieves a boolean value indicating whether named parameters are supported in callable statements. + * + * Returns: + * - false: if named parameters in callable statements are unsupported; + * - true: if named parameters in callable statements are supported. + */ + SQL_NAMED_PARAMETERS_SUPPORTED = 574; + + /* + * Retrieves a boolean value indicating whether updates made to a LOB are made on a copy or directly to the LOB. + * + * Returns: + * - false: if updates made to a LOB are made directly to the LOB; + * - true: if updates made to a LOB are made on a copy. + */ + SQL_LOCATORS_UPDATE_COPY = 575; + + /* + * Retrieves a boolean value indicating whether invoking user-defined or vendor functions + * using the stored procedure escape syntax is supported. + * + * Returns: + * - false: if invoking user-defined or vendor functions using the stored procedure escape syntax is unsupported; + * - true: if invoking user-defined or vendor functions using the stored procedure escape syntax is supported. + */ + SQL_STORED_FUNCTIONS_USING_CALL_SYNTAX_SUPPORTED = 576; +} + +// The level of support for Flight SQL transaction RPCs. +enum SqlSupportedTransaction { + // Unknown/not indicated/no support + SQL_SUPPORTED_TRANSACTION_NONE = 0; + // Transactions, but not savepoints. + // A savepoint is a mark within a transaction that can be individually + // rolled back to. 
Not all databases support savepoints. + SQL_SUPPORTED_TRANSACTION_TRANSACTION = 1; + // Transactions and savepoints + SQL_SUPPORTED_TRANSACTION_SAVEPOINT = 2; +} + +enum SqlSupportedCaseSensitivity { + SQL_CASE_SENSITIVITY_UNKNOWN = 0; + SQL_CASE_SENSITIVITY_CASE_INSENSITIVE = 1; + SQL_CASE_SENSITIVITY_UPPERCASE = 2; + SQL_CASE_SENSITIVITY_LOWERCASE = 3; +} + +enum SqlNullOrdering { + SQL_NULLS_SORTED_HIGH = 0; + SQL_NULLS_SORTED_LOW = 1; + SQL_NULLS_SORTED_AT_START = 2; + SQL_NULLS_SORTED_AT_END = 3; +} + +enum SupportedSqlGrammar { + SQL_MINIMUM_GRAMMAR = 0; + SQL_CORE_GRAMMAR = 1; + SQL_EXTENDED_GRAMMAR = 2; +} + +enum SupportedAnsi92SqlGrammarLevel { + ANSI92_ENTRY_SQL = 0; + ANSI92_INTERMEDIATE_SQL = 1; + ANSI92_FULL_SQL = 2; +} + +enum SqlOuterJoinsSupportLevel { + SQL_JOINS_UNSUPPORTED = 0; + SQL_LIMITED_OUTER_JOINS = 1; + SQL_FULL_OUTER_JOINS = 2; +} + +enum SqlSupportedGroupBy { + SQL_GROUP_BY_UNRELATED = 0; + SQL_GROUP_BY_BEYOND_SELECT = 1; +} + +enum SqlSupportedElementActions { + SQL_ELEMENT_IN_PROCEDURE_CALLS = 0; + SQL_ELEMENT_IN_INDEX_DEFINITIONS = 1; + SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS = 2; +} + +enum SqlSupportedPositionedCommands { + SQL_POSITIONED_DELETE = 0; + SQL_POSITIONED_UPDATE = 1; +} + +enum SqlSupportedSubqueries { + SQL_SUBQUERIES_IN_COMPARISONS = 0; + SQL_SUBQUERIES_IN_EXISTS = 1; + SQL_SUBQUERIES_IN_INS = 2; + SQL_SUBQUERIES_IN_QUANTIFIEDS = 3; +} + +enum SqlSupportedUnions { + SQL_UNION = 0; + SQL_UNION_ALL = 1; +} + +enum SqlTransactionIsolationLevel { + SQL_TRANSACTION_NONE = 0; + SQL_TRANSACTION_READ_UNCOMMITTED = 1; + SQL_TRANSACTION_READ_COMMITTED = 2; + SQL_TRANSACTION_REPEATABLE_READ = 3; + SQL_TRANSACTION_SERIALIZABLE = 4; +} + +enum SqlSupportedTransactions { + SQL_TRANSACTION_UNSPECIFIED = 0; + SQL_DATA_DEFINITION_TRANSACTIONS = 1; + SQL_DATA_MANIPULATION_TRANSACTIONS = 2; +} + +enum SqlSupportedResultSetType { + SQL_RESULT_SET_TYPE_UNSPECIFIED = 0; + SQL_RESULT_SET_TYPE_FORWARD_ONLY = 1; + 
SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE = 2; + SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE = 3; +} + +enum SqlSupportedResultSetConcurrency { + SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED = 0; + SQL_RESULT_SET_CONCURRENCY_READ_ONLY = 1; + SQL_RESULT_SET_CONCURRENCY_UPDATABLE = 2; +} + +enum SqlSupportsConvert { + SQL_CONVERT_BIGINT = 0; + SQL_CONVERT_BINARY = 1; + SQL_CONVERT_BIT = 2; + SQL_CONVERT_CHAR = 3; + SQL_CONVERT_DATE = 4; + SQL_CONVERT_DECIMAL = 5; + SQL_CONVERT_FLOAT = 6; + SQL_CONVERT_INTEGER = 7; + SQL_CONVERT_INTERVAL_DAY_TIME = 8; + SQL_CONVERT_INTERVAL_YEAR_MONTH = 9; + SQL_CONVERT_LONGVARBINARY = 10; + SQL_CONVERT_LONGVARCHAR = 11; + SQL_CONVERT_NUMERIC = 12; + SQL_CONVERT_REAL = 13; + SQL_CONVERT_SMALLINT = 14; + SQL_CONVERT_TIME = 15; + SQL_CONVERT_TIMESTAMP = 16; + SQL_CONVERT_TINYINT = 17; + SQL_CONVERT_VARBINARY = 18; + SQL_CONVERT_VARCHAR = 19; +} + +/** + * The JDBC/ODBC-defined type of any object. + * All the values here are the same as in the JDBC and ODBC specs. + */ +enum XdbcDataType { + XDBC_UNKNOWN_TYPE = 0; + XDBC_CHAR = 1; + XDBC_NUMERIC = 2; + XDBC_DECIMAL = 3; + XDBC_INTEGER = 4; + XDBC_SMALLINT = 5; + XDBC_FLOAT = 6; + XDBC_REAL = 7; + XDBC_DOUBLE = 8; + XDBC_DATETIME = 9; + XDBC_INTERVAL = 10; + XDBC_VARCHAR = 12; + XDBC_DATE = 91; + XDBC_TIME = 92; + XDBC_TIMESTAMP = 93; + XDBC_LONGVARCHAR = -1; + XDBC_BINARY = -2; + XDBC_VARBINARY = -3; + XDBC_LONGVARBINARY = -4; + XDBC_BIGINT = -5; + XDBC_TINYINT = -6; + XDBC_BIT = -7; + XDBC_WCHAR = -8; + XDBC_WVARCHAR = -9; +} + +/** + * Detailed subtype information for XDBC_TYPE_DATETIME and XDBC_TYPE_INTERVAL. 
+ */ +enum XdbcDatetimeSubcode { + option allow_alias = true; + XDBC_SUBCODE_UNKNOWN = 0; + XDBC_SUBCODE_YEAR = 1; + XDBC_SUBCODE_DATE = 1; + XDBC_SUBCODE_TIME = 2; + XDBC_SUBCODE_MONTH = 2; + XDBC_SUBCODE_TIMESTAMP = 3; + XDBC_SUBCODE_DAY = 3; + XDBC_SUBCODE_TIME_WITH_TIMEZONE = 4; + XDBC_SUBCODE_HOUR = 4; + XDBC_SUBCODE_TIMESTAMP_WITH_TIMEZONE = 5; + XDBC_SUBCODE_MINUTE = 5; + XDBC_SUBCODE_SECOND = 6; + XDBC_SUBCODE_YEAR_TO_MONTH = 7; + XDBC_SUBCODE_DAY_TO_HOUR = 8; + XDBC_SUBCODE_DAY_TO_MINUTE = 9; + XDBC_SUBCODE_DAY_TO_SECOND = 10; + XDBC_SUBCODE_HOUR_TO_MINUTE = 11; + XDBC_SUBCODE_HOUR_TO_SECOND = 12; + XDBC_SUBCODE_MINUTE_TO_SECOND = 13; + XDBC_SUBCODE_INTERVAL_YEAR = 101; + XDBC_SUBCODE_INTERVAL_MONTH = 102; + XDBC_SUBCODE_INTERVAL_DAY = 103; + XDBC_SUBCODE_INTERVAL_HOUR = 104; + XDBC_SUBCODE_INTERVAL_MINUTE = 105; + XDBC_SUBCODE_INTERVAL_SECOND = 106; + XDBC_SUBCODE_INTERVAL_YEAR_TO_MONTH = 107; + XDBC_SUBCODE_INTERVAL_DAY_TO_HOUR = 108; + XDBC_SUBCODE_INTERVAL_DAY_TO_MINUTE = 109; + XDBC_SUBCODE_INTERVAL_DAY_TO_SECOND = 110; + XDBC_SUBCODE_INTERVAL_HOUR_TO_MINUTE = 111; + XDBC_SUBCODE_INTERVAL_HOUR_TO_SECOND = 112; + XDBC_SUBCODE_INTERVAL_MINUTE_TO_SECOND = 113; +} + +enum Nullable { + /** + * Indicates that the fields does not allow the use of null values. + */ + NULLABILITY_NO_NULLS = 0; + + /** + * Indicates that the fields allow the use of null values. + */ + NULLABILITY_NULLABLE = 1; + + /** + * Indicates that nullability of the fields cannot be determined. + */ + NULLABILITY_UNKNOWN = 2; +} + +enum Searchable { + /** + * Indicates that column cannot be used in a WHERE clause. + */ + SEARCHABLE_NONE = 0; + + /** + * Indicates that the column can be used in a WHERE clause if it is using a + * LIKE operator. + */ + SEARCHABLE_CHAR = 1; + + /** + * Indicates that the column can be used In a WHERE clause with any + * operator other than LIKE. + * + * - Allowed operators: comparison, quantified comparison, BETWEEN, + * DISTINCT, IN, MATCH, and UNIQUE. 
+ */ + SEARCHABLE_BASIC = 2; + + /** + * Indicates that the column can be used in a WHERE clause using any operator. + */ + SEARCHABLE_FULL = 3; +} + +/* + * Represents a request to retrieve information about data type supported on a Flight SQL enabled backend. + * Used in the command member of FlightDescriptor for the following RPC calls: + * - GetSchema: return the schema of the query. + * - GetFlightInfo: execute the catalog metadata request. + * + * The returned schema will be: + * < + * type_name: utf8 not null (The name of the data type, for example: VARCHAR, INTEGER, etc), + * data_type: int32 not null (The SQL data type), + * column_size: int32 (The maximum size supported by that column. + * In case of exact numeric types, this represents the maximum precision. + * In case of string types, this represents the character length. + * In case of datetime data types, this represents the length in characters of the string representation. + * NULL is returned for data types where column size is not applicable.), + * literal_prefix: utf8 (Character or characters used to prefix a literal, NULL is returned for + * data types where a literal prefix is not applicable.), + * literal_suffix: utf8 (Character or characters used to terminate a literal, + * NULL is returned for data types where a literal suffix is not applicable.), + * create_params: list + * (A list of keywords corresponding to which parameters can be used when creating + * a column for that specific type. + * NULL is returned if there are no parameters for the data type definition.), + * nullable: int32 not null (Shows if the data type accepts a NULL value. The possible values can be seen in the + * Nullable enum.), + * case_sensitive: bool not null (Shows if a character data type is case-sensitive in collations and comparisons), + * searchable: int32 not null (Shows how the data type is used in a WHERE clause. 
The possible values can be seen in the + * Searchable enum.), + * unsigned_attribute: bool (Shows if the data type is unsigned. NULL is returned if the attribute is + * not applicable to the data type or the data type is not numeric.), + * fixed_prec_scale: bool not null (Shows if the data type has predefined fixed precision and scale.), + * auto_increment: bool (Shows if the data type is auto incremental. NULL is returned if the attribute + * is not applicable to the data type or the data type is not numeric.), + * local_type_name: utf8 (Localized version of the data source-dependent name of the data type. NULL + * is returned if a localized name is not supported by the data source), + * minimum_scale: int32 (The minimum scale of the data type on the data source. + * If a data type has a fixed scale, the MINIMUM_SCALE and MAXIMUM_SCALE + * columns both contain this value. NULL is returned if scale is not applicable.), + * maximum_scale: int32 (The maximum scale of the data type on the data source. + * NULL is returned if scale is not applicable.), + * sql_data_type: int32 not null (The value of the SQL DATA TYPE which has the same values + * as data_type value. Except for interval and datetime, which + * uses generic values. More info about those types can be + * obtained through datetime_subcode. The possible values can be seen + * in the XdbcDataType enum.), + * datetime_subcode: int32 (Only used when the SQL DATA TYPE is interval or datetime. It contains + * its sub types. For type different from interval and datetime, this value + * is NULL. The possible values can be seen in the XdbcDatetimeSubcode enum.), + * num_prec_radix: int32 (If the data type is an approximate numeric type, this column contains + * the value 2 to indicate that COLUMN_SIZE specifies a number of bits. For + * exact numeric types, this column contains the value 10 to indicate that + * column size specifies a number of decimal digits. 
Otherwise, this column is NULL.), + * interval_precision: int32 (If the data type is an interval data type, then this column contains the value + * of the interval leading precision. Otherwise, this column is NULL. This fields + * is only relevant to be used by ODBC). + * > + * The returned data should be ordered by data_type and then by type_name. + */ +message CommandGetXdbcTypeInfo { + option (experimental) = true; + + /* + * Specifies the data type to search for the info. + */ + optional int32 data_type = 1; +} + +/* + * Represents a request to retrieve the list of catalogs on a Flight SQL enabled backend. + * The definition of a catalog depends on vendor/implementation. It is usually the database itself + * Used in the command member of FlightDescriptor for the following RPC calls: + * - GetSchema: return the Arrow schema of the query. + * - GetFlightInfo: execute the catalog metadata request. + * + * The returned Arrow schema will be: + * < + * catalog_name: utf8 not null + * > + * The returned data should be ordered by catalog_name. + */ +message CommandGetCatalogs { + option (experimental) = true; +} + +/* + * Represents a request to retrieve the list of database schemas on a Flight SQL enabled backend. + * The definition of a database schema depends on vendor/implementation. It is usually a collection of tables. + * Used in the command member of FlightDescriptor for the following RPC calls: + * - GetSchema: return the Arrow schema of the query. + * - GetFlightInfo: execute the catalog metadata request. + * + * The returned Arrow schema will be: + * < + * catalog_name: utf8, + * db_schema_name: utf8 not null + * > + * The returned data should be ordered by catalog_name, then db_schema_name. + */ +message CommandGetDbSchemas { + option (experimental) = true; + + /* + * Specifies the Catalog to search for the tables. + * An empty string retrieves those without a catalog. + * If omitted the catalog name should not be used to narrow the search. 
+ */ + optional string catalog = 1; + + /* + * Specifies a filter pattern for schemas to search for. + * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search. + * In the pattern string, two special characters can be used to denote matching rules: + * - "%" means to match any substring with 0 or more characters. + * - "_" means to match any one character. + */ + optional string db_schema_filter_pattern = 2; +} + +/* + * Represents a request to retrieve the list of tables, and optionally their schemas, on a Flight SQL enabled backend. + * Used in the command member of FlightDescriptor for the following RPC calls: + * - GetSchema: return the Arrow schema of the query. + * - GetFlightInfo: execute the catalog metadata request. + * + * The returned Arrow schema will be: + * < + * catalog_name: utf8, + * db_schema_name: utf8, + * table_name: utf8 not null, + * table_type: utf8 not null, + * [optional] table_schema: bytes not null (schema of the table as described in Schema.fbs::Schema, + * it is serialized as an IPC message.) + * > + * Fields on table_schema may contain the following metadata: + * - ARROW:FLIGHT:SQL:CATALOG_NAME - Table's catalog name + * - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME - Database schema name + * - ARROW:FLIGHT:SQL:TABLE_NAME - Table name + * - ARROW:FLIGHT:SQL:TYPE_NAME - The data source-specific name for the data type of the column. + * - ARROW:FLIGHT:SQL:PRECISION - Column precision/size + * - ARROW:FLIGHT:SQL:SCALE - Column scale/decimal digits if applicable + * - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_READ_ONLY - "1" indicates if the column is read only, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_SEARCHABLE - "1" indicates if the column is searchable via WHERE clause, "0" otherwise. 
+ * The returned data should be ordered by catalog_name, db_schema_name, table_name, then table_type, followed by table_schema if requested. + */ +message CommandGetTables { + option (experimental) = true; + + /* + * Specifies the Catalog to search for the tables. + * An empty string retrieves those without a catalog. + * If omitted the catalog name should not be used to narrow the search. + */ + optional string catalog = 1; + + /* + * Specifies a filter pattern for schemas to search for. + * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched. + * In the pattern string, two special characters can be used to denote matching rules: + * - "%" means to match any substring with 0 or more characters. + * - "_" means to match any one character. + */ + optional string db_schema_filter_pattern = 2; + + /* + * Specifies a filter pattern for tables to search for. + * When no table_name_filter_pattern is provided, all tables matching other filters are searched. + * In the pattern string, two special characters can be used to denote matching rules: + * - "%" means to match any substring with 0 or more characters. + * - "_" means to match any one character. + */ + optional string table_name_filter_pattern = 3; + + /* + * Specifies a filter of table types which must match. + * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables. + * TABLE, VIEW, and SYSTEM TABLE are commonly supported. + */ + repeated string table_types = 4; + + // Specifies if the Arrow schema should be returned for found tables. + bool include_schema = 5; +} + +/* + * Represents a request to retrieve the list of table types on a Flight SQL enabled backend. + * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables. + * TABLE, VIEW, and SYSTEM TABLE are commonly supported. 
+ * Used in the command member of FlightDescriptor for the following RPC calls: + * - GetSchema: return the Arrow schema of the query. + * - GetFlightInfo: execute the catalog metadata request. + * + * The returned Arrow schema will be: + * < + * table_type: utf8 not null + * > + * The returned data should be ordered by table_type. + */ +message CommandGetTableTypes { + option (experimental) = true; +} + +/* + * Represents a request to retrieve the primary keys of a table on a Flight SQL enabled backend. + * Used in the command member of FlightDescriptor for the following RPC calls: + * - GetSchema: return the Arrow schema of the query. + * - GetFlightInfo: execute the catalog metadata request. + * + * The returned Arrow schema will be: + * < + * catalog_name: utf8, + * db_schema_name: utf8, + * table_name: utf8 not null, + * column_name: utf8 not null, + * key_name: utf8, + * key_sequence: int32 not null + * > + * The returned data should be ordered by catalog_name, db_schema_name, table_name, key_name, then key_sequence. + */ +message CommandGetPrimaryKeys { + option (experimental) = true; + + /* + * Specifies the catalog to search for the table. + * An empty string retrieves those without a catalog. + * If omitted the catalog name should not be used to narrow the search. + */ + optional string catalog = 1; + + /* + * Specifies the schema to search for the table. + * An empty string retrieves those without a schema. + * If omitted the schema name should not be used to narrow the search. + */ + optional string db_schema = 2; + + // Specifies the table to get the primary keys for. + string table = 3; +} + +enum UpdateDeleteRules { + CASCADE = 0; + RESTRICT = 1; + SET_NULL = 2; + NO_ACTION = 3; + SET_DEFAULT = 4; +} + +/* + * Represents a request to retrieve a description of the foreign key columns that reference the given table's + * primary key columns (the foreign keys exported by a table) of a table on a Flight SQL enabled backend. 
+ * Used in the command member of FlightDescriptor for the following RPC calls: + * - GetSchema: return the Arrow schema of the query. + * - GetFlightInfo: execute the catalog metadata request. + * + * The returned Arrow schema will be: + * < + * pk_catalog_name: utf8, + * pk_db_schema_name: utf8, + * pk_table_name: utf8 not null, + * pk_column_name: utf8 not null, + * fk_catalog_name: utf8, + * fk_db_schema_name: utf8, + * fk_table_name: utf8 not null, + * fk_column_name: utf8 not null, + * key_sequence: int32 not null, + * fk_key_name: utf8, + * pk_key_name: utf8, + * update_rule: uint8 not null, + * delete_rule: uint8 not null + * > + * The returned data should be ordered by fk_catalog_name, fk_db_schema_name, fk_table_name, fk_key_name, then key_sequence. + * update_rule and delete_rule returns a byte that is equivalent to actions declared on UpdateDeleteRules enum. + */ +message CommandGetExportedKeys { + option (experimental) = true; + + /* + * Specifies the catalog to search for the foreign key table. + * An empty string retrieves those without a catalog. + * If omitted the catalog name should not be used to narrow the search. + */ + optional string catalog = 1; + + /* + * Specifies the schema to search for the foreign key table. + * An empty string retrieves those without a schema. + * If omitted the schema name should not be used to narrow the search. + */ + optional string db_schema = 2; + + // Specifies the foreign key table to get the foreign keys for. + string table = 3; +} + +/* + * Represents a request to retrieve the foreign keys of a table on a Flight SQL enabled backend. + * Used in the command member of FlightDescriptor for the following RPC calls: + * - GetSchema: return the Arrow schema of the query. + * - GetFlightInfo: execute the catalog metadata request. 
+ * + * The returned Arrow schema will be: + * < + * pk_catalog_name: utf8, + * pk_db_schema_name: utf8, + * pk_table_name: utf8 not null, + * pk_column_name: utf8 not null, + * fk_catalog_name: utf8, + * fk_db_schema_name: utf8, + * fk_table_name: utf8 not null, + * fk_column_name: utf8 not null, + * key_sequence: int32 not null, + * fk_key_name: utf8, + * pk_key_name: utf8, + * update_rule: uint8 not null, + * delete_rule: uint8 not null + * > + * The returned data should be ordered by pk_catalog_name, pk_db_schema_name, pk_table_name, pk_key_name, then key_sequence. + * update_rule and delete_rule returns a byte that is equivalent to actions: + * - 0 = CASCADE + * - 1 = RESTRICT + * - 2 = SET NULL + * - 3 = NO ACTION + * - 4 = SET DEFAULT + */ +message CommandGetImportedKeys { + option (experimental) = true; + + /* + * Specifies the catalog to search for the primary key table. + * An empty string retrieves those without a catalog. + * If omitted the catalog name should not be used to narrow the search. + */ + optional string catalog = 1; + + /* + * Specifies the schema to search for the primary key table. + * An empty string retrieves those without a schema. + * If omitted the schema name should not be used to narrow the search. + */ + optional string db_schema = 2; + + // Specifies the primary key table to get the foreign keys for. + string table = 3; +} + +/* + * Represents a request to retrieve a description of the foreign key columns in the given foreign key table that + * reference the primary key or the columns representing a unique constraint of the parent table (could be the same + * or a different table) on a Flight SQL enabled backend. + * Used in the command member of FlightDescriptor for the following RPC calls: + * - GetSchema: return the Arrow schema of the query. + * - GetFlightInfo: execute the catalog metadata request. 
+ * + * The returned Arrow schema will be: + * < + * pk_catalog_name: utf8, + * pk_db_schema_name: utf8, + * pk_table_name: utf8 not null, + * pk_column_name: utf8 not null, + * fk_catalog_name: utf8, + * fk_db_schema_name: utf8, + * fk_table_name: utf8 not null, + * fk_column_name: utf8 not null, + * key_sequence: int32 not null, + * fk_key_name: utf8, + * pk_key_name: utf8, + * update_rule: uint8 not null, + * delete_rule: uint8 not null + * > + * The returned data should be ordered by pk_catalog_name, pk_db_schema_name, pk_table_name, pk_key_name, then key_sequence. + * update_rule and delete_rule returns a byte that is equivalent to actions: + * - 0 = CASCADE + * - 1 = RESTRICT + * - 2 = SET NULL + * - 3 = NO ACTION + * - 4 = SET DEFAULT + */ +message CommandGetCrossReference { + option (experimental) = true; + + /** + * The catalog name where the parent table is. + * An empty string retrieves those without a catalog. + * If omitted the catalog name should not be used to narrow the search. + */ + optional string pk_catalog = 1; + + /** + * The Schema name where the parent table is. + * An empty string retrieves those without a schema. + * If omitted the schema name should not be used to narrow the search. + */ + optional string pk_db_schema = 2; + + /** + * The parent table name. It cannot be null. + */ + string pk_table = 3; + + /** + * The catalog name where the foreign table is. + * An empty string retrieves those without a catalog. + * If omitted the catalog name should not be used to narrow the search. + */ + optional string fk_catalog = 4; + + /** + * The schema name where the foreign table is. + * An empty string retrieves those without a schema. + * If omitted the schema name should not be used to narrow the search. + */ + optional string fk_db_schema = 5; + + /** + * The foreign table name. It cannot be null. 
+ */ + string fk_table = 6; +} + +// Query Execution Action Messages + +/* + * Request message for the "CreatePreparedStatement" action on a Flight SQL enabled backend. + */ +message ActionCreatePreparedStatementRequest { + option (experimental) = true; + + // The valid SQL string to create a prepared statement for. + string query = 1; + // Create/execute the prepared statement as part of this transaction (if + // unset, executions of the prepared statement will be auto-committed). + optional bytes transaction_id = 2; +} + +/* + * An embedded message describing a Substrait plan to execute. + */ +message SubstraitPlan { + option (experimental) = true; + + // The serialized substrait.Plan to create a prepared statement for. + // XXX(ARROW-16902): this is bytes instead of an embedded message + // because Protobuf does not really support one DLL using Protobuf + // definitions from another DLL. + bytes plan = 1; + // The Substrait release, e.g. "0.12.0". This information is not + // tracked in the plan itself, so this is the only way for consumers + // to potentially know if they can handle the plan. + string version = 2; +} + +/* + * Request message for the "CreatePreparedSubstraitPlan" action on a Flight SQL enabled backend. + */ +message ActionCreatePreparedSubstraitPlanRequest { + option (experimental) = true; + + // The serialized substrait.Plan to create a prepared statement for. + SubstraitPlan plan = 1; + // Create/execute the prepared statement as part of this transaction (if + // unset, executions of the prepared statement will be auto-committed). + optional bytes transaction_id = 2; +} + +/* + * Wrap the result of a "CreatePreparedStatement" or "CreatePreparedSubstraitPlan" action. + * + * The resultant PreparedStatement can be closed either: + * - Manually, through the "ClosePreparedStatement" action; + * - Automatically, by a server timeout. + * + * The result should be wrapped in a google.protobuf.Any message. 
+ */ +message ActionCreatePreparedStatementResult { + option (experimental) = true; + + // Opaque handle for the prepared statement on the server. + bytes prepared_statement_handle = 1; + + // If a result set generating query was provided, dataset_schema contains the + // schema of the result set. It should be an IPC-encapsulated Schema, as described in Schema.fbs. + // For some queries, the schema of the results may depend on the schema of the parameters. The server + // should provide its best guess as to the schema at this point. Clients must not assume that this + // schema, if provided, will be accurate. + bytes dataset_schema = 2; + + // If the query provided contained parameters, parameter_schema contains the + // schema of the expected parameters. It should be an IPC-encapsulated Schema, as described in Schema.fbs. + bytes parameter_schema = 3; +} + +/* + * Request message for the "ClosePreparedStatement" action on a Flight SQL enabled backend. + * Closes server resources associated with the prepared statement handle. + */ +message ActionClosePreparedStatementRequest { + option (experimental) = true; + + // Opaque handle for the prepared statement on the server. + bytes prepared_statement_handle = 1; +} + +/* + * Request message for the "BeginTransaction" action. + * Begins a transaction. + */ +message ActionBeginTransactionRequest { + option (experimental) = true; +} + +/* + * Request message for the "BeginSavepoint" action. + * Creates a savepoint within a transaction. + * + * Only supported if FLIGHT_SQL_TRANSACTION is + * FLIGHT_SQL_TRANSACTION_SUPPORT_SAVEPOINT. + */ +message ActionBeginSavepointRequest { + option (experimental) = true; + + // The transaction to which a savepoint belongs. + bytes transaction_id = 1; + // Name for the savepoint. + string name = 2; +} + +/* + * The result of a "BeginTransaction" action. + * + * The transaction can be manipulated with the "EndTransaction" action, or + * automatically via server timeout. 
If the transaction times out, then it is + * automatically rolled back. + * + * The result should be wrapped in a google.protobuf.Any message. + */ +message ActionBeginTransactionResult { + option (experimental) = true; + + // Opaque handle for the transaction on the server. + bytes transaction_id = 1; +} + +/* + * The result of a "BeginSavepoint" action. + * + * The transaction can be manipulated with the "EndSavepoint" action. + * If the associated transaction is committed, rolled back, or times + * out, then the savepoint is also invalidated. + * + * The result should be wrapped in a google.protobuf.Any message. + */ +message ActionBeginSavepointResult { + option (experimental) = true; + + // Opaque handle for the savepoint on the server. + bytes savepoint_id = 1; +} + +/* + * Request message for the "EndTransaction" action. + * + * Commit (COMMIT) or rollback (ROLLBACK) the transaction. + * + * If the action completes successfully, the transaction handle is + * invalidated, as are all associated savepoints. + */ +message ActionEndTransactionRequest { + option (experimental) = true; + + enum EndTransaction { + END_TRANSACTION_UNSPECIFIED = 0; + // Commit the transaction. + END_TRANSACTION_COMMIT = 1; + // Roll back the transaction. + END_TRANSACTION_ROLLBACK = 2; + } + // Opaque handle for the transaction on the server. + bytes transaction_id = 1; + // Whether to commit/rollback the given transaction. + EndTransaction action = 2; +} + +/* + * Request message for the "EndSavepoint" action. + * + * Release (RELEASE) the savepoint or rollback (ROLLBACK) to the + * savepoint. + * + * Releasing a savepoint invalidates that savepoint. Rolling back to + * a savepoint does not invalidate the savepoint, but invalidates all + * savepoints created after the current savepoint. + */ +message ActionEndSavepointRequest { + option (experimental) = true; + + enum EndSavepoint { + END_SAVEPOINT_UNSPECIFIED = 0; + // Release the savepoint. 
+ END_SAVEPOINT_RELEASE = 1; + // Roll back to a savepoint. + END_SAVEPOINT_ROLLBACK = 2; + } + // Opaque handle for the savepoint on the server. + bytes savepoint_id = 1; + // Whether to rollback/release the given savepoint. + EndSavepoint action = 2; +} + +// Query Execution Messages. + +/* + * Represents a SQL query. Used in the command member of FlightDescriptor + * for the following RPC calls: + * - GetSchema: return the Arrow schema of the query. + * Fields on this schema may contain the following metadata: + * - ARROW:FLIGHT:SQL:CATALOG_NAME - Table's catalog name + * - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME - Database schema name + * - ARROW:FLIGHT:SQL:TABLE_NAME - Table name + * - ARROW:FLIGHT:SQL:TYPE_NAME - The data source-specific name for the data type of the column. + * - ARROW:FLIGHT:SQL:PRECISION - Column precision/size + * - ARROW:FLIGHT:SQL:SCALE - Column scale/decimal digits if applicable + * - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_READ_ONLY - "1" indicates if the column is read only, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_SEARCHABLE - "1" indicates if the column is searchable via WHERE clause, "0" otherwise. + * - GetFlightInfo: execute the query. + */ +message CommandStatementQuery { + option (experimental) = true; + + // The SQL syntax. + string query = 1; + // Include the query as part of this transaction (if unset, the query is auto-committed). + optional bytes transaction_id = 2; +} + +/* + * Represents a Substrait plan. Used in the command member of FlightDescriptor + * for the following RPC calls: + * - GetSchema: return the Arrow schema of the query. 
+ * Fields on this schema may contain the following metadata: + * - ARROW:FLIGHT:SQL:CATALOG_NAME - Table's catalog name + * - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME - Database schema name + * - ARROW:FLIGHT:SQL:TABLE_NAME - Table name + * - ARROW:FLIGHT:SQL:TYPE_NAME - The data source-specific name for the data type of the column. + * - ARROW:FLIGHT:SQL:PRECISION - Column precision/size + * - ARROW:FLIGHT:SQL:SCALE - Column scale/decimal digits if applicable + * - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_READ_ONLY - "1" indicates if the column is read only, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_SEARCHABLE - "1" indicates if the column is searchable via WHERE clause, "0" otherwise. + * - GetFlightInfo: execute the query. + * - DoPut: execute the query. + */ +message CommandStatementSubstraitPlan { + option (experimental) = true; + + // A serialized substrait.Plan + SubstraitPlan plan = 1; + // Include the query as part of this transaction (if unset, the query is auto-committed). + optional bytes transaction_id = 2; +} + +/** + * Represents a ticket resulting from GetFlightInfo with a CommandStatementQuery. + * This should be used only once and treated as an opaque value, that is, clients should not attempt to parse this. + */ +message TicketStatementQuery { + option (experimental) = true; + + // Unique identifier for the instance of the statement to execute. + bytes statement_handle = 1; +} + +/* + * Represents an instance of executing a prepared statement. Used in the command member of FlightDescriptor for + * the following RPC calls: + * - GetSchema: return the Arrow schema of the query. 
+ * Fields on this schema may contain the following metadata: + * - ARROW:FLIGHT:SQL:CATALOG_NAME - Table's catalog name + * - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME - Database schema name + * - ARROW:FLIGHT:SQL:TABLE_NAME - Table name + * - ARROW:FLIGHT:SQL:TYPE_NAME - The data source-specific name for the data type of the column. + * - ARROW:FLIGHT:SQL:PRECISION - Column precision/size + * - ARROW:FLIGHT:SQL:SCALE - Column scale/decimal digits if applicable + * - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_READ_ONLY - "1" indicates if the column is read only, "0" otherwise. + * - ARROW:FLIGHT:SQL:IS_SEARCHABLE - "1" indicates if the column is searchable via WHERE clause, "0" otherwise. + * + * If the schema is retrieved after parameter values have been bound with DoPut, then the server should account + * for the parameters when determining the schema. + * - DoPut: bind parameter values. All of the bound parameter sets will be executed as a single atomic execution. + * - GetFlightInfo: execute the prepared statement instance. + */ +message CommandPreparedStatementQuery { + option (experimental) = true; + + // Opaque handle for the prepared statement on the server. + bytes prepared_statement_handle = 1; +} + +/* + * Represents a SQL update query. Used in the command member of FlightDescriptor + * for the RPC call DoPut to cause the server to execute the included SQL update. + */ +message CommandStatementUpdate { + option (experimental) = true; + + // The SQL syntax. + string query = 1; + // Include the query as part of this transaction (if unset, the query is auto-committed). + optional bytes transaction_id = 2; +} + +/* + * Represents a SQL update query. 
+ // When an updated handle is not provided by the server, clients should continue + // using the previous handle provided by `ActionCreatePreparedStatementResponse`.
+ * + * This lets a single client explicitly cancel work, no matter how many clients + * are involved/whether the query is distributed or not, given server support. + * The transaction/statement is not rolled back; it is the application's job to + * commit or rollback as appropriate. This only indicates the client no longer + * wishes to read the remainder of the query results or continue submitting + * data. + * + * This command is idempotent. + * + * This command is deprecated since 13.0.0. Use the "CancelFlightInfo" + * action with DoAction instead. + */ +message ActionCancelQueryRequest { + option deprecated = true; + option (experimental) = true; + + // The result of the GetFlightInfo RPC that initiated the query. + // XXX(ARROW-16902): this must be a serialized FlightInfo, but is + // rendered as bytes because Protobuf does not really support one + // DLL using Protobuf definitions from another DLL. + bytes info = 1; +} + +/* + * The result of cancelling a query. + * + * The result should be wrapped in a google.protobuf.Any message. + * + * This command is deprecated since 13.0.0. Use the "CancelFlightInfo" + * action with DoAction instead. + */ +message ActionCancelQueryResult { + option deprecated = true; + option (experimental) = true; + + enum CancelResult { + // The cancellation status is unknown. Servers should avoid using + // this value (send a NOT_FOUND error if the requested query is + // not known). Clients can retry the request. + CANCEL_RESULT_UNSPECIFIED = 0; + // The cancellation request is complete. Subsequent requests with + // the same payload may return CANCELLED or a NOT_FOUND error. + CANCEL_RESULT_CANCELLED = 1; + // The cancellation request is in progress. The client may retry + // the cancellation request. + CANCEL_RESULT_CANCELLING = 2; + // The query is not cancellable. The client should not retry the + // cancellation request. 
+ CANCEL_RESULT_NOT_CANCELLABLE = 3; + } + + CancelResult result = 1; +} + +extend google.protobuf.MessageOptions { + bool experimental = 1000; +} diff --git a/java/flight/flight-core/target/classes/arrow-git.properties b/java/flight/flight-core/target/classes/arrow-git.properties new file mode 100644 index 000000000000..b05390069b77 --- /dev/null +++ b/java/flight/flight-core/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:23 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/flight/flight-core/target/generated-sources/protobuf/grpc-java/org/apache/arrow/flight/impl/FlightServiceGrpc.java b/java/flight/flight-core/target/generated-sources/protobuf/grpc-java/org/apache/arrow/flight/impl/FlightServiceGrpc.java new file mode 100644 index 000000000000..4f4a7dbeba34 --- /dev/null +++ 
b/java/flight/flight-core/target/generated-sources/protobuf/grpc-java/org/apache/arrow/flight/impl/FlightServiceGrpc.java @@ -0,0 +1,1194 @@ +package org.apache.arrow.flight.impl; + +import static io.grpc.MethodDescriptor.generateFullMethodName; + +/** + *

+ * A flight service is an endpoint for retrieving or storing Arrow data. A
+ * flight service can expose one or more predefined endpoints that can be
+ * accessed using the Arrow Flight Protocol. Additionally, a flight service
+ * can expose a set of actions that are available.
+ * 
+ */ +@javax.annotation.Generated( + value = "by gRPC proto compiler (version 1.63.0)", + comments = "Source: Flight.proto") +@io.grpc.stub.annotations.GrpcGenerated +public final class FlightServiceGrpc { + + private FlightServiceGrpc() {} + + public static final java.lang.String SERVICE_NAME = "arrow.flight.protocol.FlightService"; + + // Static method descriptors that strictly reflect the proto. + private static volatile io.grpc.MethodDescriptor getHandshakeMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "Handshake", + requestType = org.apache.arrow.flight.impl.Flight.HandshakeRequest.class, + responseType = org.apache.arrow.flight.impl.Flight.HandshakeResponse.class, + methodType = io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING) + public static io.grpc.MethodDescriptor getHandshakeMethod() { + io.grpc.MethodDescriptor getHandshakeMethod; + if ((getHandshakeMethod = FlightServiceGrpc.getHandshakeMethod) == null) { + synchronized (FlightServiceGrpc.class) { + if ((getHandshakeMethod = FlightServiceGrpc.getHandshakeMethod) == null) { + FlightServiceGrpc.getHandshakeMethod = getHandshakeMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "Handshake")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.HandshakeRequest.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.HandshakeResponse.getDefaultInstance())) + .setSchemaDescriptor(new FlightServiceMethodDescriptorSupplier("Handshake")) + .build(); + } + } + } + return getHandshakeMethod; + } + + private static volatile io.grpc.MethodDescriptor getListFlightsMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "ListFlights", + requestType = 
org.apache.arrow.flight.impl.Flight.Criteria.class, + responseType = org.apache.arrow.flight.impl.Flight.FlightInfo.class, + methodType = io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING) + public static io.grpc.MethodDescriptor getListFlightsMethod() { + io.grpc.MethodDescriptor getListFlightsMethod; + if ((getListFlightsMethod = FlightServiceGrpc.getListFlightsMethod) == null) { + synchronized (FlightServiceGrpc.class) { + if ((getListFlightsMethod = FlightServiceGrpc.getListFlightsMethod) == null) { + FlightServiceGrpc.getListFlightsMethod = getListFlightsMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListFlights")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.Criteria.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance())) + .setSchemaDescriptor(new FlightServiceMethodDescriptorSupplier("ListFlights")) + .build(); + } + } + } + return getListFlightsMethod; + } + + private static volatile io.grpc.MethodDescriptor getGetFlightInfoMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "GetFlightInfo", + requestType = org.apache.arrow.flight.impl.Flight.FlightDescriptor.class, + responseType = org.apache.arrow.flight.impl.Flight.FlightInfo.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor getGetFlightInfoMethod() { + io.grpc.MethodDescriptor getGetFlightInfoMethod; + if ((getGetFlightInfoMethod = FlightServiceGrpc.getGetFlightInfoMethod) == null) { + synchronized (FlightServiceGrpc.class) { + if ((getGetFlightInfoMethod = FlightServiceGrpc.getGetFlightInfoMethod) == null) { + FlightServiceGrpc.getGetFlightInfoMethod = getGetFlightInfoMethod = + 
io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetFlightInfo")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance())) + .setSchemaDescriptor(new FlightServiceMethodDescriptorSupplier("GetFlightInfo")) + .build(); + } + } + } + return getGetFlightInfoMethod; + } + + private static volatile io.grpc.MethodDescriptor getPollFlightInfoMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "PollFlightInfo", + requestType = org.apache.arrow.flight.impl.Flight.FlightDescriptor.class, + responseType = org.apache.arrow.flight.impl.Flight.PollInfo.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor getPollFlightInfoMethod() { + io.grpc.MethodDescriptor getPollFlightInfoMethod; + if ((getPollFlightInfoMethod = FlightServiceGrpc.getPollFlightInfoMethod) == null) { + synchronized (FlightServiceGrpc.class) { + if ((getPollFlightInfoMethod = FlightServiceGrpc.getPollFlightInfoMethod) == null) { + FlightServiceGrpc.getPollFlightInfoMethod = getPollFlightInfoMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "PollFlightInfo")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.PollInfo.getDefaultInstance())) + .setSchemaDescriptor(new FlightServiceMethodDescriptorSupplier("PollFlightInfo")) + .build(); + } + 
} + } + return getPollFlightInfoMethod; + } + + private static volatile io.grpc.MethodDescriptor getGetSchemaMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "GetSchema", + requestType = org.apache.arrow.flight.impl.Flight.FlightDescriptor.class, + responseType = org.apache.arrow.flight.impl.Flight.SchemaResult.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor getGetSchemaMethod() { + io.grpc.MethodDescriptor getGetSchemaMethod; + if ((getGetSchemaMethod = FlightServiceGrpc.getGetSchemaMethod) == null) { + synchronized (FlightServiceGrpc.class) { + if ((getGetSchemaMethod = FlightServiceGrpc.getGetSchemaMethod) == null) { + FlightServiceGrpc.getGetSchemaMethod = getGetSchemaMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetSchema")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.SchemaResult.getDefaultInstance())) + .setSchemaDescriptor(new FlightServiceMethodDescriptorSupplier("GetSchema")) + .build(); + } + } + } + return getGetSchemaMethod; + } + + private static volatile io.grpc.MethodDescriptor getDoGetMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "DoGet", + requestType = org.apache.arrow.flight.impl.Flight.Ticket.class, + responseType = org.apache.arrow.flight.impl.Flight.FlightData.class, + methodType = io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING) + public static io.grpc.MethodDescriptor getDoGetMethod() { + io.grpc.MethodDescriptor getDoGetMethod; + if ((getDoGetMethod = FlightServiceGrpc.getDoGetMethod) == null) { + synchronized (FlightServiceGrpc.class) { + if 
((getDoGetMethod = FlightServiceGrpc.getDoGetMethod) == null) { + FlightServiceGrpc.getDoGetMethod = getDoGetMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DoGet")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.Ticket.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.FlightData.getDefaultInstance())) + .setSchemaDescriptor(new FlightServiceMethodDescriptorSupplier("DoGet")) + .build(); + } + } + } + return getDoGetMethod; + } + + private static volatile io.grpc.MethodDescriptor getDoPutMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "DoPut", + requestType = org.apache.arrow.flight.impl.Flight.FlightData.class, + responseType = org.apache.arrow.flight.impl.Flight.PutResult.class, + methodType = io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING) + public static io.grpc.MethodDescriptor getDoPutMethod() { + io.grpc.MethodDescriptor getDoPutMethod; + if ((getDoPutMethod = FlightServiceGrpc.getDoPutMethod) == null) { + synchronized (FlightServiceGrpc.class) { + if ((getDoPutMethod = FlightServiceGrpc.getDoPutMethod) == null) { + FlightServiceGrpc.getDoPutMethod = getDoPutMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DoPut")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.FlightData.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.PutResult.getDefaultInstance())) + .setSchemaDescriptor(new FlightServiceMethodDescriptorSupplier("DoPut")) + .build(); + } + } + 
} + return getDoPutMethod; + } + + private static volatile io.grpc.MethodDescriptor getDoExchangeMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "DoExchange", + requestType = org.apache.arrow.flight.impl.Flight.FlightData.class, + responseType = org.apache.arrow.flight.impl.Flight.FlightData.class, + methodType = io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING) + public static io.grpc.MethodDescriptor getDoExchangeMethod() { + io.grpc.MethodDescriptor getDoExchangeMethod; + if ((getDoExchangeMethod = FlightServiceGrpc.getDoExchangeMethod) == null) { + synchronized (FlightServiceGrpc.class) { + if ((getDoExchangeMethod = FlightServiceGrpc.getDoExchangeMethod) == null) { + FlightServiceGrpc.getDoExchangeMethod = getDoExchangeMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DoExchange")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.FlightData.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.FlightData.getDefaultInstance())) + .setSchemaDescriptor(new FlightServiceMethodDescriptorSupplier("DoExchange")) + .build(); + } + } + } + return getDoExchangeMethod; + } + + private static volatile io.grpc.MethodDescriptor getDoActionMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "DoAction", + requestType = org.apache.arrow.flight.impl.Flight.Action.class, + responseType = org.apache.arrow.flight.impl.Flight.Result.class, + methodType = io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING) + public static io.grpc.MethodDescriptor getDoActionMethod() { + io.grpc.MethodDescriptor getDoActionMethod; + if ((getDoActionMethod = FlightServiceGrpc.getDoActionMethod) == null) { + synchronized (FlightServiceGrpc.class) 
{ + if ((getDoActionMethod = FlightServiceGrpc.getDoActionMethod) == null) { + FlightServiceGrpc.getDoActionMethod = getDoActionMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DoAction")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.Action.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.Result.getDefaultInstance())) + .setSchemaDescriptor(new FlightServiceMethodDescriptorSupplier("DoAction")) + .build(); + } + } + } + return getDoActionMethod; + } + + private static volatile io.grpc.MethodDescriptor getListActionsMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "ListActions", + requestType = org.apache.arrow.flight.impl.Flight.Empty.class, + responseType = org.apache.arrow.flight.impl.Flight.ActionType.class, + methodType = io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING) + public static io.grpc.MethodDescriptor getListActionsMethod() { + io.grpc.MethodDescriptor getListActionsMethod; + if ((getListActionsMethod = FlightServiceGrpc.getListActionsMethod) == null) { + synchronized (FlightServiceGrpc.class) { + if ((getListActionsMethod = FlightServiceGrpc.getListActionsMethod) == null) { + FlightServiceGrpc.getListActionsMethod = getListActionsMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListActions")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.Empty.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + org.apache.arrow.flight.impl.Flight.ActionType.getDefaultInstance())) + 
.setSchemaDescriptor(new FlightServiceMethodDescriptorSupplier("ListActions")) + .build(); + } + } + } + return getListActionsMethod; + } + + /** + * Creates a new async stub that supports all call types for the service + */ + public static FlightServiceStub newStub(io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public FlightServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new FlightServiceStub(channel, callOptions); + } + }; + return FlightServiceStub.newStub(factory, channel); + } + + /** + * Creates a new blocking-style stub that supports unary and streaming output calls on the service + */ + public static FlightServiceBlockingStub newBlockingStub( + io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public FlightServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new FlightServiceBlockingStub(channel, callOptions); + } + }; + return FlightServiceBlockingStub.newStub(factory, channel); + } + + /** + * Creates a new ListenableFuture-style stub that supports unary calls on the service + */ + public static FlightServiceFutureStub newFutureStub( + io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public FlightServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new FlightServiceFutureStub(channel, callOptions); + } + }; + return FlightServiceFutureStub.newStub(factory, channel); + } + + /** + *
+   * A flight service is an endpoint for retrieving or storing Arrow data. A
+   * flight service can expose one or more predefined endpoints that can be
+   * accessed using the Arrow Flight Protocol. Additionally, a flight service
+   * can expose a set of actions that are available.
+   * 
+ */ + public interface AsyncService { + + /** + *
+     * Handshake between client and server. Depending on the server, the
+     * handshake may be required to determine the token that should be used for
+     * future operations. Both request and response are streams to allow multiple
+     * round-trips depending on auth mechanism.
+     * 
+ */ + default io.grpc.stub.StreamObserver handshake( + io.grpc.stub.StreamObserver responseObserver) { + return io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall(getHandshakeMethod(), responseObserver); + } + + /** + *
+     * Get a list of available streams given a particular criteria. Most flight
+     * services will expose one or more streams that are readily available for
+     * retrieval. This api allows listing the streams available for
+     * consumption. A user can also provide a criteria. The criteria can limit
+     * the subset of streams that can be listed via this interface. Each flight
+     * service allows its own definition of how to consume criteria.
+     * 
+ */ + default void listFlights(org.apache.arrow.flight.impl.Flight.Criteria request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getListFlightsMethod(), responseObserver); + } + + /** + *
+     * For a given FlightDescriptor, get information about how the flight can be
+     * consumed. This is a useful interface if the consumer of the interface
+     * already can identify the specific flight to consume. This interface can
+     * also allow a consumer to generate a flight stream through a specified
+     * descriptor. For example, a flight descriptor might be something that
+     * includes a SQL statement or a Pickled Python operation that will be
+     * executed. In those cases, the descriptor will not be previously available
+     * within the list of available streams provided by ListFlights but will be
+     * available for consumption for the duration defined by the specific flight
+     * service.
+     * 
+ */ + default void getFlightInfo(org.apache.arrow.flight.impl.Flight.FlightDescriptor request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetFlightInfoMethod(), responseObserver); + } + + /** + *
+     * For a given FlightDescriptor, start a query and get information
+     * to poll its execution status. This is a useful interface if the
+     * query may be a long-running query. The first PollFlightInfo call
+     * should return as quickly as possible. (GetFlightInfo doesn't
+     * return until the query is complete.)
+     * A client can consume any available results before
+     * the query is completed. See PollInfo.info for details.
+     * A client can poll the updated query status by calling
+     * PollFlightInfo() with PollInfo.flight_descriptor. A server
+     * should not respond until the result would be different from last
+     * time. That way, the client can "long poll" for updates
+     * without constantly making requests. Clients can set a short timeout
+     * to avoid blocking calls if desired.
+     * A client can't use PollInfo.flight_descriptor after
+     * PollInfo.expiration_time passes. A server might not accept the
+     * retry descriptor anymore and the query may be cancelled.
+     * A client may use the CancelFlightInfo action with
+     * PollInfo.info to cancel the running query.
+     * 
+ */ + default void pollFlightInfo(org.apache.arrow.flight.impl.Flight.FlightDescriptor request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getPollFlightInfoMethod(), responseObserver); + } + + /** + *
+     * For a given FlightDescriptor, get the Schema as described in Schema.fbs::Schema
+     * This is used when a consumer needs the Schema of flight stream. Similar to
+     * GetFlightInfo this interface may generate a new flight that was not previously
+     * available in ListFlights.
+     * 
+ */ + default void getSchema(org.apache.arrow.flight.impl.Flight.FlightDescriptor request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetSchemaMethod(), responseObserver); + } + + /** + *
+     * Retrieve a single stream associated with a particular descriptor
+     * associated with the referenced ticket. A Flight can be composed of one or
+     * more streams where each stream can be retrieved using a separate opaque
+     * ticket that the flight service uses for managing a collection of streams.
+     * 
+ */ + default void doGet(org.apache.arrow.flight.impl.Flight.Ticket request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getDoGetMethod(), responseObserver); + } + + /** + *
+     * Push a stream to the flight service associated with a particular
+     * flight stream. This allows a client of a flight service to upload a stream
+     * of data. Depending on the particular flight service, a client consumer
+     * could be allowed to upload a single stream per descriptor or an unlimited
+     * number. In the latter, the service might implement a 'seal' action that
+     * can be applied to a descriptor once all streams are uploaded.
+     * 
+ */ + default io.grpc.stub.StreamObserver doPut( + io.grpc.stub.StreamObserver responseObserver) { + return io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall(getDoPutMethod(), responseObserver); + } + + /** + *
+     * Open a bidirectional data channel for a given descriptor. This
+     * allows clients to send and receive arbitrary Arrow data and
+     * application-specific metadata in a single logical stream. In
+     * contrast to DoGet/DoPut, this is more suited for clients
+     * offloading computation (rather than storage) to a Flight service.
+     * 
+ */ + default io.grpc.stub.StreamObserver doExchange( + io.grpc.stub.StreamObserver responseObserver) { + return io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall(getDoExchangeMethod(), responseObserver); + } + + /** + *
+     * Flight services can support an arbitrary number of simple actions in
+     * addition to the possible ListFlights, GetFlightInfo, DoGet, DoPut
+     * operations that are potentially available. DoAction allows a flight client
+     * to do a specific action against a flight service. An action includes
+     * opaque request and response objects that are specific to the type action
+     * being undertaken.
+     * 
+ */ + default void doAction(org.apache.arrow.flight.impl.Flight.Action request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getDoActionMethod(), responseObserver); + } + + /** + *
+     * A flight service exposes all of the available action types that it has
+     * along with descriptions. This allows different flight consumers to
+     * understand the capabilities of the flight service.
+     * 
+ */ + default void listActions(org.apache.arrow.flight.impl.Flight.Empty request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getListActionsMethod(), responseObserver); + } + } + + /** + * Base class for the server implementation of the service FlightService. + *
+   * A flight service is an endpoint for retrieving or storing Arrow data. A
+   * flight service can expose one or more predefined endpoints that can be
+   * accessed using the Arrow Flight Protocol. Additionally, a flight service
+   * can expose a set of actions that are available.
+   * 
+ */ + public static abstract class FlightServiceImplBase + implements io.grpc.BindableService, AsyncService { + + @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { + return FlightServiceGrpc.bindService(this); + } + } + + /** + * A stub to allow clients to do asynchronous rpc calls to service FlightService. + *
+   * A flight service is an endpoint for retrieving or storing Arrow data. A
+   * flight service can expose one or more predefined endpoints that can be
+   * accessed using the Arrow Flight Protocol. Additionally, a flight service
+   * can expose a set of actions that are available.
+   * 
+ */ + public static final class FlightServiceStub + extends io.grpc.stub.AbstractAsyncStub { + private FlightServiceStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected FlightServiceStub build( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new FlightServiceStub(channel, callOptions); + } + + /** + *
+     * Handshake between client and server. Depending on the server, the
+     * handshake may be required to determine the token that should be used for
+     * future operations. Both request and response are streams to allow multiple
+     * round-trips depending on auth mechanism.
+     * 
+ */ + public io.grpc.stub.StreamObserver handshake( + io.grpc.stub.StreamObserver responseObserver) { + return io.grpc.stub.ClientCalls.asyncBidiStreamingCall( + getChannel().newCall(getHandshakeMethod(), getCallOptions()), responseObserver); + } + + /** + *
+     * Get a list of available streams given a particular criteria. Most flight
+     * services will expose one or more streams that are readily available for
+     * retrieval. This api allows listing the streams available for
+     * consumption. A user can also provide a criteria. The criteria can limit
+     * the subset of streams that can be listed via this interface. Each flight
+     * service allows its own definition of how to consume criteria.
+     * 
+ */ + public void listFlights(org.apache.arrow.flight.impl.Flight.Criteria request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncServerStreamingCall( + getChannel().newCall(getListFlightsMethod(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * For a given FlightDescriptor, get information about how the flight can be
+     * consumed. This is a useful interface if the consumer of the interface
+     * already can identify the specific flight to consume. This interface can
+     * also allow a consumer to generate a flight stream through a specified
+     * descriptor. For example, a flight descriptor might be something that
+     * includes a SQL statement or a Pickled Python operation that will be
+     * executed. In those cases, the descriptor will not be previously available
+     * within the list of available streams provided by ListFlights but will be
+     * available for consumption for the duration defined by the specific flight
+     * service.
+     * 
+ */ + public void getFlightInfo(org.apache.arrow.flight.impl.Flight.FlightDescriptor request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getGetFlightInfoMethod(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * For a given FlightDescriptor, start a query and get information
+     * to poll its execution status. This is a useful interface if the
+     * query may be a long-running query. The first PollFlightInfo call
+     * should return as quickly as possible. (GetFlightInfo doesn't
+     * return until the query is complete.)
+     * A client can consume any available results before
+     * the query is completed. See PollInfo.info for details.
+     * A client can poll the updated query status by calling
+     * PollFlightInfo() with PollInfo.flight_descriptor. A server
+     * should not respond until the result would be different from last
+     * time. That way, the client can "long poll" for updates
+     * without constantly making requests. Clients can set a short timeout
+     * to avoid blocking calls if desired.
+     * A client can't use PollInfo.flight_descriptor after
+     * PollInfo.expiration_time passes. A server might not accept the
+     * retry descriptor anymore and the query may be cancelled.
+     * A client may use the CancelFlightInfo action with
+     * PollInfo.info to cancel the running query.
+     * 
+ */ + public void pollFlightInfo(org.apache.arrow.flight.impl.Flight.FlightDescriptor request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getPollFlightInfoMethod(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * For a given FlightDescriptor, get the Schema as described in Schema.fbs::Schema
+     * This is used when a consumer needs the Schema of flight stream. Similar to
+     * GetFlightInfo this interface may generate a new flight that was not previously
+     * available in ListFlights.
+     * 
+ */ + public void getSchema(org.apache.arrow.flight.impl.Flight.FlightDescriptor request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getGetSchemaMethod(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * Retrieve a single stream associated with a particular descriptor
+     * associated with the referenced ticket. A Flight can be composed of one or
+     * more streams where each stream can be retrieved using a separate opaque
+     * ticket that the flight service uses for managing a collection of streams.
+     * 
+ */ + public void doGet(org.apache.arrow.flight.impl.Flight.Ticket request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncServerStreamingCall( + getChannel().newCall(getDoGetMethod(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * Push a stream to the flight service associated with a particular
+     * flight stream. This allows a client of a flight service to upload a stream
+     * of data. Depending on the particular flight service, a client consumer
+     * could be allowed to upload a single stream per descriptor or an unlimited
+     * number. In the latter, the service might implement a 'seal' action that
+     * can be applied to a descriptor once all streams are uploaded.
+     * 
+ */ + public io.grpc.stub.StreamObserver doPut( + io.grpc.stub.StreamObserver responseObserver) { + return io.grpc.stub.ClientCalls.asyncBidiStreamingCall( + getChannel().newCall(getDoPutMethod(), getCallOptions()), responseObserver); + } + + /** + *
+     * Open a bidirectional data channel for a given descriptor. This
+     * allows clients to send and receive arbitrary Arrow data and
+     * application-specific metadata in a single logical stream. In
+     * contrast to DoGet/DoPut, this is more suited for clients
+     * offloading computation (rather than storage) to a Flight service.
+     * 
+ */ + public io.grpc.stub.StreamObserver doExchange( + io.grpc.stub.StreamObserver responseObserver) { + return io.grpc.stub.ClientCalls.asyncBidiStreamingCall( + getChannel().newCall(getDoExchangeMethod(), getCallOptions()), responseObserver); + } + + /** + *
+     * Flight services can support an arbitrary number of simple actions in
+     * addition to the possible ListFlights, GetFlightInfo, DoGet, DoPut
+     * operations that are potentially available. DoAction allows a flight client
+     * to do a specific action against a flight service. An action includes
+     * opaque request and response objects that are specific to the type action
+     * being undertaken.
+     * 
+ */ + public void doAction(org.apache.arrow.flight.impl.Flight.Action request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncServerStreamingCall( + getChannel().newCall(getDoActionMethod(), getCallOptions()), request, responseObserver); + } + + /** + *
+     * A flight service exposes all of the available action types that it has
+     * along with descriptions. This allows different flight consumers to
+     * understand the capabilities of the flight service.
+     * 
+ */ + public void listActions(org.apache.arrow.flight.impl.Flight.Empty request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncServerStreamingCall( + getChannel().newCall(getListActionsMethod(), getCallOptions()), request, responseObserver); + } + } + + /** + * A stub to allow clients to do synchronous rpc calls to service FlightService. + *
+   * A flight service is an endpoint for retrieving or storing Arrow data. A
+   * flight service can expose one or more predefined endpoints that can be
+   * accessed using the Arrow Flight Protocol. Additionally, a flight service
+   * can expose a set of actions that are available.
+   * 
+ */ + public static final class FlightServiceBlockingStub + extends io.grpc.stub.AbstractBlockingStub { + private FlightServiceBlockingStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected FlightServiceBlockingStub build( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new FlightServiceBlockingStub(channel, callOptions); + } + + /** + *
+     * Get a list of available streams given a particular criteria. Most flight
+     * services will expose one or more streams that are readily available for
+     * retrieval. This api allows listing the streams available for
+     * consumption. A user can also provide a criteria. The criteria can limit
+     * the subset of streams that can be listed via this interface. Each flight
+     * service allows its own definition of how to consume criteria.
+     * 
+ */ + public java.util.Iterator listFlights( + org.apache.arrow.flight.impl.Flight.Criteria request) { + return io.grpc.stub.ClientCalls.blockingServerStreamingCall( + getChannel(), getListFlightsMethod(), getCallOptions(), request); + } + + /** + *
+     * For a given FlightDescriptor, get information about how the flight can be
+     * consumed. This is a useful interface if the consumer of the interface
+     * already can identify the specific flight to consume. This interface can
+     * also allow a consumer to generate a flight stream through a specified
+     * descriptor. For example, a flight descriptor might be something that
+     * includes a SQL statement or a Pickled Python operation that will be
+     * executed. In those cases, the descriptor will not be previously available
+     * within the list of available streams provided by ListFlights but will be
+     * available for consumption for the duration defined by the specific flight
+     * service.
+     * 
+ */ + public org.apache.arrow.flight.impl.Flight.FlightInfo getFlightInfo(org.apache.arrow.flight.impl.Flight.FlightDescriptor request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getGetFlightInfoMethod(), getCallOptions(), request); + } + + /** + *
+     * For a given FlightDescriptor, start a query and get information
+     * to poll its execution status. This is a useful interface if the
+     * query may be a long-running query. The first PollFlightInfo call
+     * should return as quickly as possible. (GetFlightInfo doesn't
+     * return until the query is complete.)
+     * A client can consume any available results before
+     * the query is completed. See PollInfo.info for details.
+     * A client can poll the updated query status by calling
+     * PollFlightInfo() with PollInfo.flight_descriptor. A server
+     * should not respond until the result would be different from last
+     * time. That way, the client can "long poll" for updates
+     * without constantly making requests. Clients can set a short timeout
+     * to avoid blocking calls if desired.
+     * A client can't use PollInfo.flight_descriptor after
+     * PollInfo.expiration_time passes. A server might not accept the
+     * retry descriptor anymore and the query may be cancelled.
+     * A client may use the CancelFlightInfo action with
+     * PollInfo.info to cancel the running query.
+     * 
+ */ + public org.apache.arrow.flight.impl.Flight.PollInfo pollFlightInfo(org.apache.arrow.flight.impl.Flight.FlightDescriptor request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getPollFlightInfoMethod(), getCallOptions(), request); + } + + /** + *
+     * For a given FlightDescriptor, get the Schema as described in Schema.fbs::Schema
+     * This is used when a consumer needs the Schema of flight stream. Similar to
+     * GetFlightInfo this interface may generate a new flight that was not previously
+     * available in ListFlights.
+     * 
+ */ + public org.apache.arrow.flight.impl.Flight.SchemaResult getSchema(org.apache.arrow.flight.impl.Flight.FlightDescriptor request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getGetSchemaMethod(), getCallOptions(), request); + } + + /** + *
+     * Retrieve a single stream associated with a particular descriptor
+     * associated with the referenced ticket. A Flight can be composed of one or
+     * more streams where each stream can be retrieved using a separate opaque
+     * ticket that the flight service uses for managing a collection of streams.
+     * 
+ */ + public java.util.Iterator doGet( + org.apache.arrow.flight.impl.Flight.Ticket request) { + return io.grpc.stub.ClientCalls.blockingServerStreamingCall( + getChannel(), getDoGetMethod(), getCallOptions(), request); + } + + /** + *
+     * Flight services can support an arbitrary number of simple actions in
+     * addition to the possible ListFlights, GetFlightInfo, DoGet, DoPut
+     * operations that are potentially available. DoAction allows a flight client
+     * to do a specific action against a flight service. An action includes
+     * opaque request and response objects that are specific to the type action
+     * being undertaken.
+     * 
+ */ + public java.util.Iterator doAction( + org.apache.arrow.flight.impl.Flight.Action request) { + return io.grpc.stub.ClientCalls.blockingServerStreamingCall( + getChannel(), getDoActionMethod(), getCallOptions(), request); + } + + /** + *
+     * A flight service exposes all of the available action types that it has
+     * along with descriptions. This allows different flight consumers to
+     * understand the capabilities of the flight service.
+     * 
+ */ + public java.util.Iterator listActions( + org.apache.arrow.flight.impl.Flight.Empty request) { + return io.grpc.stub.ClientCalls.blockingServerStreamingCall( + getChannel(), getListActionsMethod(), getCallOptions(), request); + } + } + + /** + * A stub to allow clients to do ListenableFuture-style rpc calls to service FlightService. + *
+   * A flight service is an endpoint for retrieving or storing Arrow data. A
+   * flight service can expose one or more predefined endpoints that can be
+   * accessed using the Arrow Flight Protocol. Additionally, a flight service
+   * can expose a set of actions that are available.
+   * 
+ */ + public static final class FlightServiceFutureStub + extends io.grpc.stub.AbstractFutureStub { + private FlightServiceFutureStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected FlightServiceFutureStub build( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new FlightServiceFutureStub(channel, callOptions); + } + + /** + *
+     * For a given FlightDescriptor, get information about how the flight can be
+     * consumed. This is a useful interface if the consumer of the interface
+     * already can identify the specific flight to consume. This interface can
+     * also allow a consumer to generate a flight stream through a specified
+     * descriptor. For example, a flight descriptor might be something that
+     * includes a SQL statement or a Pickled Python operation that will be
+     * executed. In those cases, the descriptor will not be previously available
+     * within the list of available streams provided by ListFlights but will be
+     * available for consumption for the duration defined by the specific flight
+     * service.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture getFlightInfo( + org.apache.arrow.flight.impl.Flight.FlightDescriptor request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getGetFlightInfoMethod(), getCallOptions()), request); + } + + /** + *
+     * For a given FlightDescriptor, start a query and get information
+     * to poll its execution status. This is a useful interface if the
+     * query may be a long-running query. The first PollFlightInfo call
+     * should return as quickly as possible. (GetFlightInfo doesn't
+     * return until the query is complete.)
+     * A client can consume any available results before
+     * the query is completed. See PollInfo.info for details.
+     * A client can poll the updated query status by calling
+     * PollFlightInfo() with PollInfo.flight_descriptor. A server
+     * should not respond until the result would be different from last
+     * time. That way, the client can "long poll" for updates
+     * without constantly making requests. Clients can set a short timeout
+     * to avoid blocking calls if desired.
+     * A client can't use PollInfo.flight_descriptor after
+     * PollInfo.expiration_time passes. A server might not accept the
+     * retry descriptor anymore and the query may be cancelled.
+     * A client may use the CancelFlightInfo action with
+     * PollInfo.info to cancel the running query.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture pollFlightInfo( + org.apache.arrow.flight.impl.Flight.FlightDescriptor request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getPollFlightInfoMethod(), getCallOptions()), request); + } + + /** + *
+     * For a given FlightDescriptor, get the Schema as described in Schema.fbs::Schema
+     * This is used when a consumer needs the Schema of flight stream. Similar to
+     * GetFlightInfo this interface may generate a new flight that was not previously
+     * available in ListFlights.
+     * 
+ */ + public com.google.common.util.concurrent.ListenableFuture getSchema( + org.apache.arrow.flight.impl.Flight.FlightDescriptor request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getGetSchemaMethod(), getCallOptions()), request); + } + } + + private static final int METHODID_LIST_FLIGHTS = 0; + private static final int METHODID_GET_FLIGHT_INFO = 1; + private static final int METHODID_POLL_FLIGHT_INFO = 2; + private static final int METHODID_GET_SCHEMA = 3; + private static final int METHODID_DO_GET = 4; + private static final int METHODID_DO_ACTION = 5; + private static final int METHODID_LIST_ACTIONS = 6; + private static final int METHODID_HANDSHAKE = 7; + private static final int METHODID_DO_PUT = 8; + private static final int METHODID_DO_EXCHANGE = 9; + + private static final class MethodHandlers implements + io.grpc.stub.ServerCalls.UnaryMethod, + io.grpc.stub.ServerCalls.ServerStreamingMethod, + io.grpc.stub.ServerCalls.ClientStreamingMethod, + io.grpc.stub.ServerCalls.BidiStreamingMethod { + private final AsyncService serviceImpl; + private final int methodId; + + MethodHandlers(AsyncService serviceImpl, int methodId) { + this.serviceImpl = serviceImpl; + this.methodId = methodId; + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + case METHODID_LIST_FLIGHTS: + serviceImpl.listFlights((org.apache.arrow.flight.impl.Flight.Criteria) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_GET_FLIGHT_INFO: + serviceImpl.getFlightInfo((org.apache.arrow.flight.impl.Flight.FlightDescriptor) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_POLL_FLIGHT_INFO: + serviceImpl.pollFlightInfo((org.apache.arrow.flight.impl.Flight.FlightDescriptor) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_GET_SCHEMA: + 
serviceImpl.getSchema((org.apache.arrow.flight.impl.Flight.FlightDescriptor) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_DO_GET: + serviceImpl.doGet((org.apache.arrow.flight.impl.Flight.Ticket) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_DO_ACTION: + serviceImpl.doAction((org.apache.arrow.flight.impl.Flight.Action) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + case METHODID_LIST_ACTIONS: + serviceImpl.listActions((org.apache.arrow.flight.impl.Flight.Empty) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + default: + throw new AssertionError(); + } + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public io.grpc.stub.StreamObserver invoke( + io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + case METHODID_HANDSHAKE: + return (io.grpc.stub.StreamObserver) serviceImpl.handshake( + (io.grpc.stub.StreamObserver) responseObserver); + case METHODID_DO_PUT: + return (io.grpc.stub.StreamObserver) serviceImpl.doPut( + (io.grpc.stub.StreamObserver) responseObserver); + case METHODID_DO_EXCHANGE: + return (io.grpc.stub.StreamObserver) serviceImpl.doExchange( + (io.grpc.stub.StreamObserver) responseObserver); + default: + throw new AssertionError(); + } + } + } + + public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { + return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) + .addMethod( + getHandshakeMethod(), + io.grpc.stub.ServerCalls.asyncBidiStreamingCall( + new MethodHandlers< + org.apache.arrow.flight.impl.Flight.HandshakeRequest, + org.apache.arrow.flight.impl.Flight.HandshakeResponse>( + service, METHODID_HANDSHAKE))) + .addMethod( + getListFlightsMethod(), + io.grpc.stub.ServerCalls.asyncServerStreamingCall( + new MethodHandlers< + org.apache.arrow.flight.impl.Flight.Criteria, + org.apache.arrow.flight.impl.Flight.FlightInfo>( + service, 
METHODID_LIST_FLIGHTS))) + .addMethod( + getGetFlightInfoMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, + org.apache.arrow.flight.impl.Flight.FlightInfo>( + service, METHODID_GET_FLIGHT_INFO))) + .addMethod( + getPollFlightInfoMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, + org.apache.arrow.flight.impl.Flight.PollInfo>( + service, METHODID_POLL_FLIGHT_INFO))) + .addMethod( + getGetSchemaMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new MethodHandlers< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, + org.apache.arrow.flight.impl.Flight.SchemaResult>( + service, METHODID_GET_SCHEMA))) + .addMethod( + getDoGetMethod(), + io.grpc.stub.ServerCalls.asyncServerStreamingCall( + new MethodHandlers< + org.apache.arrow.flight.impl.Flight.Ticket, + org.apache.arrow.flight.impl.Flight.FlightData>( + service, METHODID_DO_GET))) + .addMethod( + getDoPutMethod(), + io.grpc.stub.ServerCalls.asyncBidiStreamingCall( + new MethodHandlers< + org.apache.arrow.flight.impl.Flight.FlightData, + org.apache.arrow.flight.impl.Flight.PutResult>( + service, METHODID_DO_PUT))) + .addMethod( + getDoExchangeMethod(), + io.grpc.stub.ServerCalls.asyncBidiStreamingCall( + new MethodHandlers< + org.apache.arrow.flight.impl.Flight.FlightData, + org.apache.arrow.flight.impl.Flight.FlightData>( + service, METHODID_DO_EXCHANGE))) + .addMethod( + getDoActionMethod(), + io.grpc.stub.ServerCalls.asyncServerStreamingCall( + new MethodHandlers< + org.apache.arrow.flight.impl.Flight.Action, + org.apache.arrow.flight.impl.Flight.Result>( + service, METHODID_DO_ACTION))) + .addMethod( + getListActionsMethod(), + io.grpc.stub.ServerCalls.asyncServerStreamingCall( + new MethodHandlers< + org.apache.arrow.flight.impl.Flight.Empty, + org.apache.arrow.flight.impl.Flight.ActionType>( + service, METHODID_LIST_ACTIONS))) + .build(); + } 
+ + private static abstract class FlightServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { + FlightServiceBaseDescriptorSupplier() {} + + @java.lang.Override + public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { + return org.apache.arrow.flight.impl.Flight.getDescriptor(); + } + + @java.lang.Override + public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { + return getFileDescriptor().findServiceByName("FlightService"); + } + } + + private static final class FlightServiceFileDescriptorSupplier + extends FlightServiceBaseDescriptorSupplier { + FlightServiceFileDescriptorSupplier() {} + } + + private static final class FlightServiceMethodDescriptorSupplier + extends FlightServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { + private final java.lang.String methodName; + + FlightServiceMethodDescriptorSupplier(java.lang.String methodName) { + this.methodName = methodName; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { + return getServiceDescriptor().findMethodByName(methodName); + } + } + + private static volatile io.grpc.ServiceDescriptor serviceDescriptor; + + public static io.grpc.ServiceDescriptor getServiceDescriptor() { + io.grpc.ServiceDescriptor result = serviceDescriptor; + if (result == null) { + synchronized (FlightServiceGrpc.class) { + result = serviceDescriptor; + if (result == null) { + serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) + .setSchemaDescriptor(new FlightServiceFileDescriptorSupplier()) + .addMethod(getHandshakeMethod()) + .addMethod(getListFlightsMethod()) + .addMethod(getGetFlightInfoMethod()) + .addMethod(getPollFlightInfoMethod()) + .addMethod(getGetSchemaMethod()) + .addMethod(getDoGetMethod()) + .addMethod(getDoPutMethod()) + .addMethod(getDoExchangeMethod()) + 
.addMethod(getDoActionMethod()) + .addMethod(getListActionsMethod()) + .build(); + } + } + } + return result; + } +} diff --git a/java/flight/flight-core/target/generated-sources/protobuf/java/org/apache/arrow/flight/impl/Flight.java b/java/flight/flight-core/target/generated-sources/protobuf/java/org/apache/arrow/flight/impl/Flight.java new file mode 100644 index 000000000000..2f99dd016fbb --- /dev/null +++ b/java/flight/flight-core/target/generated-sources/protobuf/java/org/apache/arrow/flight/impl/Flight.java @@ -0,0 +1,22585 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: Flight.proto + +package org.apache.arrow.flight.impl; + +public final class Flight { + private Flight() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + /** + *
+   *
+   * The result of a cancel operation.
+   *
+   * This is used by CancelFlightInfoResult.status.
+   * 
+ * + * Protobuf enum {@code arrow.flight.protocol.CancelStatus} + */ + public enum CancelStatus + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+     * The cancellation status is unknown. Servers should avoid using
+     * this value (send a NOT_FOUND error if the requested query is
+     * not known). Clients can retry the request.
+     * 
+ * + * CANCEL_STATUS_UNSPECIFIED = 0; + */ + CANCEL_STATUS_UNSPECIFIED(0), + /** + *
+     * The cancellation request is complete. Subsequent requests with
+     * the same payload may return CANCELLED or a NOT_FOUND error.
+     * 
+ * + * CANCEL_STATUS_CANCELLED = 1; + */ + CANCEL_STATUS_CANCELLED(1), + /** + *
+     * The cancellation request is in progress. The client may retry
+     * the cancellation request.
+     * 
+ * + * CANCEL_STATUS_CANCELLING = 2; + */ + CANCEL_STATUS_CANCELLING(2), + /** + *
+     * The query is not cancellable. The client should not retry the
+     * cancellation request.
+     * 
+ * + * CANCEL_STATUS_NOT_CANCELLABLE = 3; + */ + CANCEL_STATUS_NOT_CANCELLABLE(3), + UNRECOGNIZED(-1), + ; + + /** + *
+     * The cancellation status is unknown. Servers should avoid using
+     * this value (send a NOT_FOUND error if the requested query is
+     * not known). Clients can retry the request.
+     * 
+ * + * CANCEL_STATUS_UNSPECIFIED = 0; + */ + public static final int CANCEL_STATUS_UNSPECIFIED_VALUE = 0; + /** + *
+     * The cancellation request is complete. Subsequent requests with
+     * the same payload may return CANCELLED or a NOT_FOUND error.
+     * 
+ * + * CANCEL_STATUS_CANCELLED = 1; + */ + public static final int CANCEL_STATUS_CANCELLED_VALUE = 1; + /** + *
+     * The cancellation request is in progress. The client may retry
+     * the cancellation request.
+     * 
+ * + * CANCEL_STATUS_CANCELLING = 2; + */ + public static final int CANCEL_STATUS_CANCELLING_VALUE = 2; + /** + *
+     * The query is not cancellable. The client should not retry the
+     * cancellation request.
+     * 
+ * + * CANCEL_STATUS_NOT_CANCELLABLE = 3; + */ + public static final int CANCEL_STATUS_NOT_CANCELLABLE_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static CancelStatus valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static CancelStatus forNumber(int value) { + switch (value) { + case 0: return CANCEL_STATUS_UNSPECIFIED; + case 1: return CANCEL_STATUS_CANCELLED; + case 2: return CANCEL_STATUS_CANCELLING; + case 3: return CANCEL_STATUS_NOT_CANCELLABLE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + CancelStatus> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public CancelStatus findValueByNumber(int number) { + return CancelStatus.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return 
org.apache.arrow.flight.impl.Flight.getDescriptor().getEnumTypes().get(0); + } + + private static final CancelStatus[] VALUES = values(); + + public static CancelStatus valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private CancelStatus(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.CancelStatus) + } + + public interface HandshakeRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.HandshakeRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * A defined protocol version
+     * 
+ * + * uint64 protocol_version = 1; + * @return The protocolVersion. + */ + long getProtocolVersion(); + + /** + *
+     *
+     * Arbitrary auth/handshake info.
+     * 
+ * + * bytes payload = 2; + * @return The payload. + */ + com.google.protobuf.ByteString getPayload(); + } + /** + *
+   *
+   * The request that a client provides to a server on handshake.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.HandshakeRequest} + */ + public static final class HandshakeRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.HandshakeRequest) + HandshakeRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use HandshakeRequest.newBuilder() to construct. + private HandshakeRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private HandshakeRequest() { + payload_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new HandshakeRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_HandshakeRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_HandshakeRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.HandshakeRequest.class, org.apache.arrow.flight.impl.Flight.HandshakeRequest.Builder.class); + } + + public static final int PROTOCOL_VERSION_FIELD_NUMBER = 1; + private long protocolVersion_ = 0L; + /** + *
+     *
+     * A defined protocol version
+     * 
+ * + * uint64 protocol_version = 1; + * @return The protocolVersion. + */ + @java.lang.Override + public long getProtocolVersion() { + return protocolVersion_; + } + + public static final int PAYLOAD_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     *
+     * Arbitrary auth/handshake info.
+     * 
+ * + * bytes payload = 2; + * @return The payload. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPayload() { + return payload_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (protocolVersion_ != 0L) { + output.writeUInt64(1, protocolVersion_); + } + if (!payload_.isEmpty()) { + output.writeBytes(2, payload_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (protocolVersion_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, protocolVersion_); + } + if (!payload_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, payload_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.HandshakeRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.HandshakeRequest other = (org.apache.arrow.flight.impl.Flight.HandshakeRequest) obj; + + if (getProtocolVersion() + != other.getProtocolVersion()) return false; + if (!getPayload() + .equals(other.getPayload())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * 
hash) + PROTOCOL_VERSION_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProtocolVersion()); + hash = (37 * hash) + PAYLOAD_FIELD_NUMBER; + hash = (53 * hash) + getPayload().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.HandshakeRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * The request that a client provides to a server on handshake.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.HandshakeRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.HandshakeRequest) + org.apache.arrow.flight.impl.Flight.HandshakeRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_HandshakeRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_HandshakeRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.HandshakeRequest.class, org.apache.arrow.flight.impl.Flight.HandshakeRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.HandshakeRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + protocolVersion_ = 0L; + payload_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_HandshakeRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.HandshakeRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.HandshakeRequest.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.HandshakeRequest build() { + org.apache.arrow.flight.impl.Flight.HandshakeRequest result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.HandshakeRequest buildPartial() { + org.apache.arrow.flight.impl.Flight.HandshakeRequest result = new org.apache.arrow.flight.impl.Flight.HandshakeRequest(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.HandshakeRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.protocolVersion_ = protocolVersion_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.payload_ = payload_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.HandshakeRequest) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.HandshakeRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.HandshakeRequest other) { + if (other == org.apache.arrow.flight.impl.Flight.HandshakeRequest.getDefaultInstance()) return this; + if (other.getProtocolVersion() != 0L) { + setProtocolVersion(other.getProtocolVersion()); + } + if (other.getPayload() != com.google.protobuf.ByteString.EMPTY) { + setPayload(other.getPayload()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + 
protocolVersion_ = input.readUInt64(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 18: { + payload_ = input.readBytes(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private long protocolVersion_ ; + /** + *
+       *
+       * A defined protocol version
+       * 
+ * + * uint64 protocol_version = 1; + * @return The protocolVersion. + */ + @java.lang.Override + public long getProtocolVersion() { + return protocolVersion_; + } + /** + *
+       *
+       * A defined protocol version
+       * 
+ * + * uint64 protocol_version = 1; + * @param value The protocolVersion to set. + * @return This builder for chaining. + */ + public Builder setProtocolVersion(long value) { + + protocolVersion_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * A defined protocol version
+       * 
+ * + * uint64 protocol_version = 1; + * @return This builder for chaining. + */ + public Builder clearProtocolVersion() { + bitField0_ = (bitField0_ & ~0x00000001); + protocolVersion_ = 0L; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       *
+       * Arbitrary auth/handshake info.
+       * 
+ * + * bytes payload = 2; + * @return The payload. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPayload() { + return payload_; + } + /** + *
+       *
+       * Arbitrary auth/handshake info.
+       * 
+ * + * bytes payload = 2; + * @param value The payload to set. + * @return This builder for chaining. + */ + public Builder setPayload(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + payload_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * Arbitrary auth/handshake info.
+       * 
+ * + * bytes payload = 2; + * @return This builder for chaining. + */ + public Builder clearPayload() { + bitField0_ = (bitField0_ & ~0x00000002); + payload_ = getDefaultInstance().getPayload(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.HandshakeRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.HandshakeRequest) + private static final org.apache.arrow.flight.impl.Flight.HandshakeRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.HandshakeRequest(); + } + + public static org.apache.arrow.flight.impl.Flight.HandshakeRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public HandshakeRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public 
static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.HandshakeRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface HandshakeResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.HandshakeResponse) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * A defined protocol version
+     * 
+ * + * uint64 protocol_version = 1; + * @return The protocolVersion. + */ + long getProtocolVersion(); + + /** + *
+     *
+     * Arbitrary auth/handshake info.
+     * 
+ * + * bytes payload = 2; + * @return The payload. + */ + com.google.protobuf.ByteString getPayload(); + } + /** + * Protobuf type {@code arrow.flight.protocol.HandshakeResponse} + */ + public static final class HandshakeResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.HandshakeResponse) + HandshakeResponseOrBuilder { + private static final long serialVersionUID = 0L; + // Use HandshakeResponse.newBuilder() to construct. + private HandshakeResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private HandshakeResponse() { + payload_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new HandshakeResponse(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_HandshakeResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_HandshakeResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.HandshakeResponse.class, org.apache.arrow.flight.impl.Flight.HandshakeResponse.Builder.class); + } + + public static final int PROTOCOL_VERSION_FIELD_NUMBER = 1; + private long protocolVersion_ = 0L; + /** + *
+     *
+     * A defined protocol version
+     * 
+ * + * uint64 protocol_version = 1; + * @return The protocolVersion. + */ + @java.lang.Override + public long getProtocolVersion() { + return protocolVersion_; + } + + public static final int PAYLOAD_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     *
+     * Arbitrary auth/handshake info.
+     * 
+ * + * bytes payload = 2; + * @return The payload. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPayload() { + return payload_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (protocolVersion_ != 0L) { + output.writeUInt64(1, protocolVersion_); + } + if (!payload_.isEmpty()) { + output.writeBytes(2, payload_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (protocolVersion_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, protocolVersion_); + } + if (!payload_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, payload_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.HandshakeResponse)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.HandshakeResponse other = (org.apache.arrow.flight.impl.Flight.HandshakeResponse) obj; + + if (getProtocolVersion() + != other.getProtocolVersion()) return false; + if (!getPayload() + .equals(other.getPayload())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 
* hash) + PROTOCOL_VERSION_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getProtocolVersion()); + hash = (37 * hash) + PAYLOAD_FIELD_NUMBER; + hash = (53 * hash) + getPayload().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.HandshakeResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code arrow.flight.protocol.HandshakeResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.HandshakeResponse) + org.apache.arrow.flight.impl.Flight.HandshakeResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_HandshakeResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_HandshakeResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.HandshakeResponse.class, org.apache.arrow.flight.impl.Flight.HandshakeResponse.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.HandshakeResponse.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + protocolVersion_ = 0L; + payload_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_HandshakeResponse_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.HandshakeResponse getDefaultInstanceForType() { + return 
org.apache.arrow.flight.impl.Flight.HandshakeResponse.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.HandshakeResponse build() { + org.apache.arrow.flight.impl.Flight.HandshakeResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.HandshakeResponse buildPartial() { + org.apache.arrow.flight.impl.Flight.HandshakeResponse result = new org.apache.arrow.flight.impl.Flight.HandshakeResponse(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.HandshakeResponse result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.protocolVersion_ = protocolVersion_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.payload_ = payload_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.HandshakeResponse) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.HandshakeResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.HandshakeResponse other) { + if (other == org.apache.arrow.flight.impl.Flight.HandshakeResponse.getDefaultInstance()) return this; + if (other.getProtocolVersion() != 0L) { + setProtocolVersion(other.getProtocolVersion()); + } + if (other.getPayload() != com.google.protobuf.ByteString.EMPTY) { + setPayload(other.getPayload()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + protocolVersion_ = input.readUInt64(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 18: { + payload_ = input.readBytes(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private long protocolVersion_ ; + /** + *
+       *
+       * A defined protocol version
+       * 
+ * + * uint64 protocol_version = 1; + * @return The protocolVersion. + */ + @java.lang.Override + public long getProtocolVersion() { + return protocolVersion_; + } + /** + *
+       *
+       * A defined protocol version
+       * 
+ * + * uint64 protocol_version = 1; + * @param value The protocolVersion to set. + * @return This builder for chaining. + */ + public Builder setProtocolVersion(long value) { + + protocolVersion_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * A defined protocol version
+       * 
+ * + * uint64 protocol_version = 1; + * @return This builder for chaining. + */ + public Builder clearProtocolVersion() { + bitField0_ = (bitField0_ & ~0x00000001); + protocolVersion_ = 0L; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       *
+       * Arbitrary auth/handshake info.
+       * 
+ * + * bytes payload = 2; + * @return The payload. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPayload() { + return payload_; + } + /** + *
+       *
+       * Arbitrary auth/handshake info.
+       * 
+ * + * bytes payload = 2; + * @param value The payload to set. + * @return This builder for chaining. + */ + public Builder setPayload(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + payload_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * Arbitrary auth/handshake info.
+       * 
+ * + * bytes payload = 2; + * @return This builder for chaining. + */ + public Builder clearPayload() { + bitField0_ = (bitField0_ & ~0x00000002); + payload_ = getDefaultInstance().getPayload(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.HandshakeResponse) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.HandshakeResponse) + private static final org.apache.arrow.flight.impl.Flight.HandshakeResponse DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.HandshakeResponse(); + } + + public static org.apache.arrow.flight.impl.Flight.HandshakeResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public HandshakeResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + 
public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.HandshakeResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface BasicAuthOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.BasicAuth) + com.google.protobuf.MessageOrBuilder { + + /** + * string username = 2; + * @return The username. + */ + java.lang.String getUsername(); + /** + * string username = 2; + * @return The bytes for username. + */ + com.google.protobuf.ByteString + getUsernameBytes(); + + /** + * string password = 3; + * @return The password. + */ + java.lang.String getPassword(); + /** + * string password = 3; + * @return The bytes for password. + */ + com.google.protobuf.ByteString + getPasswordBytes(); + } + /** + *
+   *
+   * A message for doing simple auth.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.BasicAuth} + */ + public static final class BasicAuth extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.BasicAuth) + BasicAuthOrBuilder { + private static final long serialVersionUID = 0L; + // Use BasicAuth.newBuilder() to construct. + private BasicAuth(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private BasicAuth() { + username_ = ""; + password_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new BasicAuth(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_BasicAuth_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_BasicAuth_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.BasicAuth.class, org.apache.arrow.flight.impl.Flight.BasicAuth.Builder.class); + } + + public static final int USERNAME_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object username_ = ""; + /** + * string username = 2; + * @return The username. + */ + @java.lang.Override + public java.lang.String getUsername() { + java.lang.Object ref = username_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + username_ = s; + return s; + } + } + /** + * string username = 2; + * @return The bytes for username. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getUsernameBytes() { + java.lang.Object ref = username_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + username_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PASSWORD_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object password_ = ""; + /** + * string password = 3; + * @return The password. + */ + @java.lang.Override + public java.lang.String getPassword() { + java.lang.Object ref = password_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + password_ = s; + return s; + } + } + /** + * string password = 3; + * @return The bytes for password. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getPasswordBytes() { + java.lang.Object ref = password_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + password_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(username_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, username_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(password_)) { + 
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, password_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(username_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, username_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(password_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, password_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.BasicAuth)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.BasicAuth other = (org.apache.arrow.flight.impl.Flight.BasicAuth) obj; + + if (!getUsername() + .equals(other.getUsername())) return false; + if (!getPassword() + .equals(other.getPassword())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + USERNAME_FIELD_NUMBER; + hash = (53 * hash) + getUsername().hashCode(); + hash = (37 * hash) + PASSWORD_FIELD_NUMBER; + hash = (53 * hash) + getPassword().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseFrom( + java.nio.ByteBuffer data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseDelimitedFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.BasicAuth parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.BasicAuth prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * A message for doing simple auth.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.BasicAuth} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.BasicAuth) + org.apache.arrow.flight.impl.Flight.BasicAuthOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_BasicAuth_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_BasicAuth_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.BasicAuth.class, org.apache.arrow.flight.impl.Flight.BasicAuth.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.BasicAuth.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + username_ = ""; + password_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_BasicAuth_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.BasicAuth getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.BasicAuth.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.BasicAuth build() { + org.apache.arrow.flight.impl.Flight.BasicAuth result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public 
org.apache.arrow.flight.impl.Flight.BasicAuth buildPartial() { + org.apache.arrow.flight.impl.Flight.BasicAuth result = new org.apache.arrow.flight.impl.Flight.BasicAuth(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.BasicAuth result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.username_ = username_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.password_ = password_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.BasicAuth) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.BasicAuth)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.BasicAuth other) { + if (other == org.apache.arrow.flight.impl.Flight.BasicAuth.getDefaultInstance()) return this; + if (!other.getUsername().isEmpty()) { + username_ = other.username_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (!other.getPassword().isEmpty()) { + password_ = other.password_; + bitField0_ |= 0x00000002; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 18: { + username_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 18 + case 26: { + password_ = 
input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 26 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object username_ = ""; + /** + * string username = 2; + * @return The username. + */ + public java.lang.String getUsername() { + java.lang.Object ref = username_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + username_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string username = 2; + * @return The bytes for username. + */ + public com.google.protobuf.ByteString + getUsernameBytes() { + java.lang.Object ref = username_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + username_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string username = 2; + * @param value The username to set. + * @return This builder for chaining. + */ + public Builder setUsername( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + username_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * string username = 2; + * @return This builder for chaining. + */ + public Builder clearUsername() { + username_ = getDefaultInstance().getUsername(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * string username = 2; + * @param value The bytes for username to set. + * @return This builder for chaining. 
+ */ + public Builder setUsernameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + username_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object password_ = ""; + /** + * string password = 3; + * @return The password. + */ + public java.lang.String getPassword() { + java.lang.Object ref = password_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + password_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string password = 3; + * @return The bytes for password. + */ + public com.google.protobuf.ByteString + getPasswordBytes() { + java.lang.Object ref = password_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + password_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string password = 3; + * @param value The password to set. + * @return This builder for chaining. + */ + public Builder setPassword( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + password_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * string password = 3; + * @return This builder for chaining. + */ + public Builder clearPassword() { + password_ = getDefaultInstance().getPassword(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * string password = 3; + * @param value The bytes for password to set. + * @return This builder for chaining. 
+ */ + public Builder setPasswordBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + password_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.BasicAuth) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.BasicAuth) + private static final org.apache.arrow.flight.impl.Flight.BasicAuth DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.BasicAuth(); + } + + public static org.apache.arrow.flight.impl.Flight.BasicAuth getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public BasicAuth parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.BasicAuth getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface EmptyOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.Empty) + com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code arrow.flight.protocol.Empty} + */ + public static final class Empty extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.Empty) + EmptyOrBuilder { + private static final long serialVersionUID = 0L; + // Use Empty.newBuilder() to construct. + private Empty(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Empty() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Empty(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Empty_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Empty_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Empty.class, org.apache.arrow.flight.impl.Flight.Empty.Builder.class); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public 
void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.Empty)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.Empty other = (org.apache.arrow.flight.impl.Flight.Empty) obj; + + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.Empty parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Empty parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Empty parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Empty parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, 
extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Empty parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Empty parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Empty parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Empty parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.Empty parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.Empty parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Empty parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Empty parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.Empty prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code arrow.flight.protocol.Empty} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.Empty) + org.apache.arrow.flight.impl.Flight.EmptyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Empty_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Empty_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Empty.class, org.apache.arrow.flight.impl.Flight.Empty.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.Empty.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + return this; + } + + @java.lang.Override + public 
com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Empty_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Empty getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.Empty.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Empty build() { + org.apache.arrow.flight.impl.Flight.Empty result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Empty buildPartial() { + org.apache.arrow.flight.impl.Flight.Empty result = new org.apache.arrow.flight.impl.Flight.Empty(this); + onBuilt(); + return result; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.Empty) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.Empty)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.Empty other) { + if (other == org.apache.arrow.flight.impl.Flight.Empty.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup 
tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.Empty) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.Empty) + private static final org.apache.arrow.flight.impl.Flight.Empty DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.Empty(); + } + + public static org.apache.arrow.flight.impl.Flight.Empty getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Empty parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; 
+ } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Empty getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionTypeOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.ActionType) + com.google.protobuf.MessageOrBuilder { + + /** + * string type = 1; + * @return The type. + */ + java.lang.String getType(); + /** + * string type = 1; + * @return The bytes for type. + */ + com.google.protobuf.ByteString + getTypeBytes(); + + /** + * string description = 2; + * @return The description. + */ + java.lang.String getDescription(); + /** + * string description = 2; + * @return The bytes for description. + */ + com.google.protobuf.ByteString + getDescriptionBytes(); + } + /** + *
+   *
+   * Describes an available action, including both the name used for execution
+   * along with a short description of the purpose of the action.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.ActionType} + */ + public static final class ActionType extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.ActionType) + ActionTypeOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionType.newBuilder() to construct. + private ActionType(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionType() { + type_ = ""; + description_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionType(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_ActionType_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_ActionType_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.ActionType.class, org.apache.arrow.flight.impl.Flight.ActionType.Builder.class); + } + + public static final int TYPE_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object type_ = ""; + /** + * string type = 1; + * @return The type. + */ + @java.lang.Override + public java.lang.String getType() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + type_ = s; + return s; + } + } + /** + * string type = 1; + * @return The bytes for type. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DESCRIPTION_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object description_ = ""; + /** + * string description = 2; + * @return The description. + */ + @java.lang.Override + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } + } + /** + * string description = 2; + * @return The bytes for description. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, type_); + } + if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, description_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, type_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, description_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.ActionType)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.ActionType other = (org.apache.arrow.flight.impl.Flight.ActionType) obj; + + if (!getType() + .equals(other.getType())) return false; + if (!getDescription() + .equals(other.getDescription())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + getType().hashCode(); + hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER; + hash = (53 * hash) + getDescription().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.ActionType parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
org.apache.arrow.flight.impl.Flight.ActionType parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.ActionType parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.ActionType parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.ActionType parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.ActionType parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.ActionType parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.ActionType parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.ActionType parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + 
} + + public static org.apache.arrow.flight.impl.Flight.ActionType parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.ActionType parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.ActionType parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.ActionType prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Describes an available action, including both the name used for execution
+     * along with a short description of the purpose of the action.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.ActionType} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.ActionType) + org.apache.arrow.flight.impl.Flight.ActionTypeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_ActionType_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_ActionType_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.ActionType.class, org.apache.arrow.flight.impl.Flight.ActionType.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.ActionType.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + type_ = ""; + description_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_ActionType_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.ActionType getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.ActionType.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.ActionType build() { + org.apache.arrow.flight.impl.Flight.ActionType result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public 
org.apache.arrow.flight.impl.Flight.ActionType buildPartial() { + org.apache.arrow.flight.impl.Flight.ActionType result = new org.apache.arrow.flight.impl.Flight.ActionType(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.ActionType result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.type_ = type_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.description_ = description_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.ActionType) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.ActionType)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.ActionType other) { + if (other == org.apache.arrow.flight.impl.Flight.ActionType.getDefaultInstance()) return this; + if (!other.getType().isEmpty()) { + type_ = other.type_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (!other.getDescription().isEmpty()) { + description_ = other.description_; + bitField0_ |= 0x00000002; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + type_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + description_ = 
input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object type_ = ""; + /** + * string type = 1; + * @return The type. + */ + public java.lang.String getType() { + java.lang.Object ref = type_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + type_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string type = 1; + * @return The bytes for type. + */ + public com.google.protobuf.ByteString + getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string type = 1; + * @param value The type to set. + * @return This builder for chaining. + */ + public Builder setType( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + type_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * string type = 1; + * @return This builder for chaining. + */ + public Builder clearType() { + type_ = getDefaultInstance().getType(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * string type = 1; + * @param value The bytes for type to set. + * @return This builder for chaining. 
+ */ + public Builder setTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + type_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object description_ = ""; + /** + * string description = 2; + * @return The description. + */ + public java.lang.String getDescription() { + java.lang.Object ref = description_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + description_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string description = 2; + * @return The bytes for description. + */ + public com.google.protobuf.ByteString + getDescriptionBytes() { + java.lang.Object ref = description_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + description_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string description = 2; + * @param value The description to set. + * @return This builder for chaining. + */ + public Builder setDescription( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + description_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * string description = 2; + * @return This builder for chaining. + */ + public Builder clearDescription() { + description_ = getDefaultInstance().getDescription(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * string description = 2; + * @param value The bytes for description to set. + * @return This builder for chaining. 
+ */ + public Builder setDescriptionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + description_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.ActionType) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.ActionType) + private static final org.apache.arrow.flight.impl.Flight.ActionType DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.ActionType(); + } + + public static org.apache.arrow.flight.impl.Flight.ActionType getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionType parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public 
static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.ActionType getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CriteriaOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.Criteria) + com.google.protobuf.MessageOrBuilder { + + /** + * bytes expression = 1; + * @return The expression. + */ + com.google.protobuf.ByteString getExpression(); + } + /** + *
+   *
+   * A service specific expression that can be used to return a limited set
+   * of available Arrow Flight streams.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.Criteria} + */ + public static final class Criteria extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.Criteria) + CriteriaOrBuilder { + private static final long serialVersionUID = 0L; + // Use Criteria.newBuilder() to construct. + private Criteria(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Criteria() { + expression_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Criteria(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Criteria_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Criteria_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Criteria.class, org.apache.arrow.flight.impl.Flight.Criteria.Builder.class); + } + + public static final int EXPRESSION_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString expression_ = com.google.protobuf.ByteString.EMPTY; + /** + * bytes expression = 1; + * @return The expression. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString getExpression() { + return expression_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!expression_.isEmpty()) { + output.writeBytes(1, expression_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!expression_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, expression_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.Criteria)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.Criteria other = (org.apache.arrow.flight.impl.Flight.Criteria) obj; + + if (!getExpression() + .equals(other.getExpression())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + EXPRESSION_FIELD_NUMBER; + hash = (53 * hash) + getExpression().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.Criteria parseFrom( + java.nio.ByteBuffer data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Criteria parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Criteria parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Criteria parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Criteria parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Criteria parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Criteria parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Criteria parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.Criteria parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.Criteria parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Criteria parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Criteria parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.Criteria prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * A service specific expression that can be used to return a limited set
+     * of available Arrow Flight streams.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.Criteria} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.Criteria) + org.apache.arrow.flight.impl.Flight.CriteriaOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Criteria_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Criteria_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Criteria.class, org.apache.arrow.flight.impl.Flight.Criteria.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.Criteria.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + expression_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Criteria_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Criteria getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.Criteria.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Criteria build() { + org.apache.arrow.flight.impl.Flight.Criteria result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public 
org.apache.arrow.flight.impl.Flight.Criteria buildPartial() { + org.apache.arrow.flight.impl.Flight.Criteria result = new org.apache.arrow.flight.impl.Flight.Criteria(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.Criteria result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.expression_ = expression_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.Criteria) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.Criteria)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.Criteria other) { + if (other == org.apache.arrow.flight.impl.Flight.Criteria.getDefaultInstance()) return this; + if (other.getExpression() != com.google.protobuf.ByteString.EMPTY) { + setExpression(other.getExpression()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + expression_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw 
e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString expression_ = com.google.protobuf.ByteString.EMPTY; + /** + * bytes expression = 1; + * @return The expression. + */ + @java.lang.Override + public com.google.protobuf.ByteString getExpression() { + return expression_; + } + /** + * bytes expression = 1; + * @param value The expression to set. + * @return This builder for chaining. + */ + public Builder setExpression(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + expression_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * bytes expression = 1; + * @return This builder for chaining. + */ + public Builder clearExpression() { + bitField0_ = (bitField0_ & ~0x00000001); + expression_ = getDefaultInstance().getExpression(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.Criteria) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.Criteria) + private static final org.apache.arrow.flight.impl.Flight.Criteria DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.Criteria(); + } + + public static org.apache.arrow.flight.impl.Flight.Criteria getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Criteria parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Criteria getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.Action) + com.google.protobuf.MessageOrBuilder { + + /** + * string type = 1; + * @return The type. + */ + java.lang.String getType(); + /** + * string type = 1; + * @return The bytes for type. + */ + com.google.protobuf.ByteString + getTypeBytes(); + + /** + * bytes body = 2; + * @return The body. + */ + com.google.protobuf.ByteString getBody(); + } + /** + *
+   *
+   * An opaque action specific for the service.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.Action} + */ + public static final class Action extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.Action) + ActionOrBuilder { + private static final long serialVersionUID = 0L; + // Use Action.newBuilder() to construct. + private Action(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Action() { + type_ = ""; + body_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Action(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Action_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Action_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Action.class, org.apache.arrow.flight.impl.Flight.Action.Builder.class); + } + + public static final int TYPE_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object type_ = ""; + /** + * string type = 1; + * @return The type. + */ + @java.lang.Override + public java.lang.String getType() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + type_ = s; + return s; + } + } + /** + * string type = 1; + * @return The bytes for type. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int BODY_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString body_ = com.google.protobuf.ByteString.EMPTY; + /** + * bytes body = 2; + * @return The body. + */ + @java.lang.Override + public com.google.protobuf.ByteString getBody() { + return body_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, type_); + } + if (!body_.isEmpty()) { + output.writeBytes(2, body_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, type_); + } + if (!body_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, body_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.Action)) { + return 
super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.Action other = (org.apache.arrow.flight.impl.Flight.Action) obj; + + if (!getType() + .equals(other.getType())) return false; + if (!getBody() + .equals(other.getBody())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + getType().hashCode(); + hash = (37 * hash) + BODY_FIELD_NUMBER; + hash = (53 * hash) + getBody().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.Action parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Action parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Action parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Action parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Action parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
org.apache.arrow.flight.impl.Flight.Action parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Action parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Action parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.Action parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.Action parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Action parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Action parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static 
Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.Action prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * An opaque action specific for the service.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.Action} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.Action) + org.apache.arrow.flight.impl.Flight.ActionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Action_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Action_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Action.class, org.apache.arrow.flight.impl.Flight.Action.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.Action.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + type_ = ""; + body_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Action_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Action getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.Action.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Action build() { + org.apache.arrow.flight.impl.Flight.Action result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Action 
buildPartial() { + org.apache.arrow.flight.impl.Flight.Action result = new org.apache.arrow.flight.impl.Flight.Action(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.Action result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.type_ = type_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.body_ = body_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.Action) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.Action)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.Action other) { + if (other == org.apache.arrow.flight.impl.Flight.Action.getDefaultInstance()) return this; + if (!other.getType().isEmpty()) { + type_ = other.type_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.getBody() != com.google.protobuf.ByteString.EMPTY) { + setBody(other.getBody()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + type_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + body_ = input.readBytes(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, 
tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object type_ = ""; + /** + * string type = 1; + * @return The type. + */ + public java.lang.String getType() { + java.lang.Object ref = type_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + type_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string type = 1; + * @return The bytes for type. + */ + public com.google.protobuf.ByteString + getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string type = 1; + * @param value The type to set. + * @return This builder for chaining. + */ + public Builder setType( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + type_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * string type = 1; + * @return This builder for chaining. + */ + public Builder clearType() { + type_ = getDefaultInstance().getType(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * string type = 1; + * @param value The bytes for type to set. + * @return This builder for chaining. 
+ */ + public Builder setTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + type_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString body_ = com.google.protobuf.ByteString.EMPTY; + /** + * bytes body = 2; + * @return The body. + */ + @java.lang.Override + public com.google.protobuf.ByteString getBody() { + return body_; + } + /** + * bytes body = 2; + * @param value The body to set. + * @return This builder for chaining. + */ + public Builder setBody(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + body_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * bytes body = 2; + * @return This builder for chaining. + */ + public Builder clearBody() { + bitField0_ = (bitField0_ & ~0x00000002); + body_ = getDefaultInstance().getBody(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.Action) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.Action) + private static final org.apache.arrow.flight.impl.Flight.Action DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.Action(); + } + + public static org.apache.arrow.flight.impl.Flight.Action getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Action parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Action getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CancelFlightInfoRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.CancelFlightInfoRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * .arrow.flight.protocol.FlightInfo info = 1; + * @return Whether the info field is set. + */ + boolean hasInfo(); + /** + * .arrow.flight.protocol.FlightInfo info = 1; + * @return The info. + */ + org.apache.arrow.flight.impl.Flight.FlightInfo getInfo(); + /** + * .arrow.flight.protocol.FlightInfo info = 1; + */ + org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder getInfoOrBuilder(); + } + /** + *
+   *
+   * The request of the CancelFlightInfo action.
+   *
+   * The request should be stored in Action.body.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.CancelFlightInfoRequest} + */ + public static final class CancelFlightInfoRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.CancelFlightInfoRequest) + CancelFlightInfoRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use CancelFlightInfoRequest.newBuilder() to construct. + private CancelFlightInfoRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CancelFlightInfoRequest() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CancelFlightInfoRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CancelFlightInfoRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CancelFlightInfoRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest.class, org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest.Builder.class); + } + + public static final int INFO_FIELD_NUMBER = 1; + private org.apache.arrow.flight.impl.Flight.FlightInfo info_; + /** + * .arrow.flight.protocol.FlightInfo info = 1; + * @return Whether the info field is set. + */ + @java.lang.Override + public boolean hasInfo() { + return info_ != null; + } + /** + * .arrow.flight.protocol.FlightInfo info = 1; + * @return The info. + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightInfo getInfo() { + return info_ == null ? 
org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance() : info_; + } + /** + * .arrow.flight.protocol.FlightInfo info = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder getInfoOrBuilder() { + return info_ == null ? org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance() : info_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (info_ != null) { + output.writeMessage(1, getInfo()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (info_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getInfo()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest other = (org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest) obj; + + if (hasInfo() != other.hasInfo()) return false; + if (hasInfo()) { + if (!getInfo() + .equals(other.getInfo())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasInfo()) 
{ + hash = (37 * hash) + INFO_FIELD_NUMBER; + hash = (53 * hash) + getInfo().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * The request of the CancelFlightInfo action.
+     *
+     * The request should be stored in Action.body.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.CancelFlightInfoRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.CancelFlightInfoRequest) + org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CancelFlightInfoRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CancelFlightInfoRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest.class, org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + info_ = null; + if (infoBuilder_ != null) { + infoBuilder_.dispose(); + infoBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CancelFlightInfoRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest build() { + 
org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest buildPartial() { + org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest result = new org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.info_ = infoBuilder_ == null + ? info_ + : infoBuilder_.build(); + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest other) { + if (other == org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest.getDefaultInstance()) return this; + if (other.hasInfo()) { + mergeInfo(other.getInfo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + 
getInfoFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.flight.impl.Flight.FlightInfo info_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightInfo, org.apache.arrow.flight.impl.Flight.FlightInfo.Builder, org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder> infoBuilder_; + /** + * .arrow.flight.protocol.FlightInfo info = 1; + * @return Whether the info field is set. + */ + public boolean hasInfo() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * .arrow.flight.protocol.FlightInfo info = 1; + * @return The info. + */ + public org.apache.arrow.flight.impl.Flight.FlightInfo getInfo() { + if (infoBuilder_ == null) { + return info_ == null ? 
org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance() : info_; + } else { + return infoBuilder_.getMessage(); + } + } + /** + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public Builder setInfo(org.apache.arrow.flight.impl.Flight.FlightInfo value) { + if (infoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + info_ = value; + } else { + infoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public Builder setInfo( + org.apache.arrow.flight.impl.Flight.FlightInfo.Builder builderForValue) { + if (infoBuilder_ == null) { + info_ = builderForValue.build(); + } else { + infoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public Builder mergeInfo(org.apache.arrow.flight.impl.Flight.FlightInfo value) { + if (infoBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + info_ != null && + info_ != org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance()) { + getInfoBuilder().mergeFrom(value); + } else { + info_ = value; + } + } else { + infoBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public Builder clearInfo() { + bitField0_ = (bitField0_ & ~0x00000001); + info_ = null; + if (infoBuilder_ != null) { + infoBuilder_.dispose(); + infoBuilder_ = null; + } + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public org.apache.arrow.flight.impl.Flight.FlightInfo.Builder getInfoBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getInfoFieldBuilder().getBuilder(); + } + /** + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder getInfoOrBuilder() { + if 
(infoBuilder_ != null) { + return infoBuilder_.getMessageOrBuilder(); + } else { + return info_ == null ? + org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance() : info_; + } + } + /** + * .arrow.flight.protocol.FlightInfo info = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightInfo, org.apache.arrow.flight.impl.Flight.FlightInfo.Builder, org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder> + getInfoFieldBuilder() { + if (infoBuilder_ == null) { + infoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightInfo, org.apache.arrow.flight.impl.Flight.FlightInfo.Builder, org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder>( + getInfo(), + getParentForChildren(), + isClean()); + info_ = null; + } + return infoBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.CancelFlightInfoRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.CancelFlightInfoRequest) + private static final org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest(); + } + + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CancelFlightInfoRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CancelFlightInfoRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface RenewFlightEndpointRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.RenewFlightEndpointRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + * @return Whether the endpoint field is set. + */ + boolean hasEndpoint(); + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + * @return The endpoint. + */ + org.apache.arrow.flight.impl.Flight.FlightEndpoint getEndpoint(); + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + */ + org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder getEndpointOrBuilder(); + } + /** + *
+   *
+   * The request of the RenewFlightEndpoint action.
+   *
+   * The request should be stored in Action.body.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.RenewFlightEndpointRequest} + */ + public static final class RenewFlightEndpointRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.RenewFlightEndpointRequest) + RenewFlightEndpointRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use RenewFlightEndpointRequest.newBuilder() to construct. + private RenewFlightEndpointRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private RenewFlightEndpointRequest() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new RenewFlightEndpointRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_RenewFlightEndpointRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_RenewFlightEndpointRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest.class, org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest.Builder.class); + } + + public static final int ENDPOINT_FIELD_NUMBER = 1; + private org.apache.arrow.flight.impl.Flight.FlightEndpoint endpoint_; + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + * @return Whether the endpoint field is set. + */ + @java.lang.Override + public boolean hasEndpoint() { + return endpoint_ != null; + } + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + * @return The endpoint. 
+ */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightEndpoint getEndpoint() { + return endpoint_ == null ? org.apache.arrow.flight.impl.Flight.FlightEndpoint.getDefaultInstance() : endpoint_; + } + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder getEndpointOrBuilder() { + return endpoint_ == null ? org.apache.arrow.flight.impl.Flight.FlightEndpoint.getDefaultInstance() : endpoint_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (endpoint_ != null) { + output.writeMessage(1, getEndpoint()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (endpoint_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getEndpoint()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest other = (org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest) obj; + + if (hasEndpoint() != other.hasEndpoint()) return false; + if (hasEndpoint()) { + if (!getEndpoint() + .equals(other.getEndpoint())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + 
return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasEndpoint()) { + hash = (37 * hash) + ENDPOINT_FIELD_NUMBER; + hash = (53 * hash) + getEndpoint().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder 
newBuilder(org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * The request of the RenewFlightEndpoint action.
+     *
+     * The request should be stored in Action.body.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.RenewFlightEndpointRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.RenewFlightEndpointRequest) + org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_RenewFlightEndpointRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_RenewFlightEndpointRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest.class, org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + endpoint_ = null; + if (endpointBuilder_ != null) { + endpointBuilder_.dispose(); + endpointBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_RenewFlightEndpointRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest build() { + org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest buildPartial() { + org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest result = new org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.endpoint_ = endpointBuilder_ == null + ? endpoint_ + : endpointBuilder_.build(); + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest other) { + if (other == org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest.getDefaultInstance()) return this; + if (other.hasEndpoint()) { + mergeEndpoint(other.getEndpoint()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + 
while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getEndpointFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.flight.impl.Flight.FlightEndpoint endpoint_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightEndpoint, org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder, org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder> endpointBuilder_; + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + * @return Whether the endpoint field is set. + */ + public boolean hasEndpoint() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + * @return The endpoint. + */ + public org.apache.arrow.flight.impl.Flight.FlightEndpoint getEndpoint() { + if (endpointBuilder_ == null) { + return endpoint_ == null ? 
org.apache.arrow.flight.impl.Flight.FlightEndpoint.getDefaultInstance() : endpoint_; + } else { + return endpointBuilder_.getMessage(); + } + } + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + */ + public Builder setEndpoint(org.apache.arrow.flight.impl.Flight.FlightEndpoint value) { + if (endpointBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + endpoint_ = value; + } else { + endpointBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + */ + public Builder setEndpoint( + org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder builderForValue) { + if (endpointBuilder_ == null) { + endpoint_ = builderForValue.build(); + } else { + endpointBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + */ + public Builder mergeEndpoint(org.apache.arrow.flight.impl.Flight.FlightEndpoint value) { + if (endpointBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + endpoint_ != null && + endpoint_ != org.apache.arrow.flight.impl.Flight.FlightEndpoint.getDefaultInstance()) { + getEndpointBuilder().mergeFrom(value); + } else { + endpoint_ = value; + } + } else { + endpointBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + */ + public Builder clearEndpoint() { + bitField0_ = (bitField0_ & ~0x00000001); + endpoint_ = null; + if (endpointBuilder_ != null) { + endpointBuilder_.dispose(); + endpointBuilder_ = null; + } + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + */ + public org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder getEndpointBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getEndpointFieldBuilder().getBuilder(); + 
} + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + */ + public org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder getEndpointOrBuilder() { + if (endpointBuilder_ != null) { + return endpointBuilder_.getMessageOrBuilder(); + } else { + return endpoint_ == null ? + org.apache.arrow.flight.impl.Flight.FlightEndpoint.getDefaultInstance() : endpoint_; + } + } + /** + * .arrow.flight.protocol.FlightEndpoint endpoint = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightEndpoint, org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder, org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder> + getEndpointFieldBuilder() { + if (endpointBuilder_ == null) { + endpointBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightEndpoint, org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder, org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder>( + getEndpoint(), + getParentForChildren(), + isClean()); + endpoint_ = null; + } + return endpointBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.RenewFlightEndpointRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.RenewFlightEndpointRequest) + private static final org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest(); + } + + public static org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + 
private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public RenewFlightEndpointRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.RenewFlightEndpointRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.Result) + com.google.protobuf.MessageOrBuilder { + + /** + * bytes body = 1; + * @return The body. + */ + com.google.protobuf.ByteString getBody(); + } + /** + *
+   *
+   * An opaque result returned after executing an action.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.Result} + */ + public static final class Result extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.Result) + ResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use Result.newBuilder() to construct. + private Result(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Result() { + body_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Result(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Result_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Result_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Result.class, org.apache.arrow.flight.impl.Flight.Result.Builder.class); + } + + public static final int BODY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString body_ = com.google.protobuf.ByteString.EMPTY; + /** + * bytes body = 1; + * @return The body. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString getBody() { + return body_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!body_.isEmpty()) { + output.writeBytes(1, body_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!body_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, body_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.Result)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.Result other = (org.apache.arrow.flight.impl.Flight.Result) obj; + + if (!getBody() + .equals(other.getBody())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + BODY_FIELD_NUMBER; + hash = (53 * hash) + getBody().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.Result parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
org.apache.arrow.flight.impl.Flight.Result parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Result parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Result parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Result parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Result parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Result parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Result parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.Result parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static 
org.apache.arrow.flight.impl.Flight.Result parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Result parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Result parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.Result prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * An opaque result returned after executing an action.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.Result} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.Result) + org.apache.arrow.flight.impl.Flight.ResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Result_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Result_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Result.class, org.apache.arrow.flight.impl.Flight.Result.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.Result.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + body_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Result_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Result getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.Result.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Result build() { + org.apache.arrow.flight.impl.Flight.Result result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Result buildPartial() 
{ + org.apache.arrow.flight.impl.Flight.Result result = new org.apache.arrow.flight.impl.Flight.Result(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.Result result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.body_ = body_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.Result) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.Result)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.Result other) { + if (other == org.apache.arrow.flight.impl.Flight.Result.getDefaultInstance()) return this; + if (other.getBody() != com.google.protobuf.ByteString.EMPTY) { + setBody(other.getBody()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + body_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + 
+ private com.google.protobuf.ByteString body_ = com.google.protobuf.ByteString.EMPTY; + /** + * bytes body = 1; + * @return The body. + */ + @java.lang.Override + public com.google.protobuf.ByteString getBody() { + return body_; + } + /** + * bytes body = 1; + * @param value The body to set. + * @return This builder for chaining. + */ + public Builder setBody(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + body_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * bytes body = 1; + * @return This builder for chaining. + */ + public Builder clearBody() { + bitField0_ = (bitField0_ & ~0x00000001); + body_ = getDefaultInstance().getBody(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.Result) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.Result) + private static final org.apache.arrow.flight.impl.Flight.Result DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.Result(); + } + + public static org.apache.arrow.flight.impl.Flight.Result getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Result parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Result getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CancelFlightInfoResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.CancelFlightInfoResult) + com.google.protobuf.MessageOrBuilder { + + /** + * .arrow.flight.protocol.CancelStatus status = 1; + * @return The enum numeric value on the wire for status. + */ + int getStatusValue(); + /** + * .arrow.flight.protocol.CancelStatus status = 1; + * @return The status. + */ + org.apache.arrow.flight.impl.Flight.CancelStatus getStatus(); + } + /** + *
+   *
+   * The result of the CancelFlightInfo action.
+   *
+   * The result should be stored in Result.body.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.CancelFlightInfoResult} + */ + public static final class CancelFlightInfoResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.CancelFlightInfoResult) + CancelFlightInfoResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use CancelFlightInfoResult.newBuilder() to construct. + private CancelFlightInfoResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CancelFlightInfoResult() { + status_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CancelFlightInfoResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CancelFlightInfoResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CancelFlightInfoResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult.class, org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult.Builder.class); + } + + public static final int STATUS_FIELD_NUMBER = 1; + private int status_ = 0; + /** + * .arrow.flight.protocol.CancelStatus status = 1; + * @return The enum numeric value on the wire for status. + */ + @java.lang.Override public int getStatusValue() { + return status_; + } + /** + * .arrow.flight.protocol.CancelStatus status = 1; + * @return The status. 
+ */ + @java.lang.Override public org.apache.arrow.flight.impl.Flight.CancelStatus getStatus() { + org.apache.arrow.flight.impl.Flight.CancelStatus result = org.apache.arrow.flight.impl.Flight.CancelStatus.forNumber(status_); + return result == null ? org.apache.arrow.flight.impl.Flight.CancelStatus.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (status_ != org.apache.arrow.flight.impl.Flight.CancelStatus.CANCEL_STATUS_UNSPECIFIED.getNumber()) { + output.writeEnum(1, status_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (status_ != org.apache.arrow.flight.impl.Flight.CancelStatus.CANCEL_STATUS_UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, status_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult other = (org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult) obj; + + if (status_ != other.status_) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + 
getDescriptor().hashCode(); + hash = (37 * hash) + STATUS_FIELD_NUMBER; + hash = (53 * hash) + status_; + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * The result of the CancelFlightInfo action.
+     *
+     * The result should be stored in Result.body.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.CancelFlightInfoResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.CancelFlightInfoResult) + org.apache.arrow.flight.impl.Flight.CancelFlightInfoResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CancelFlightInfoResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CancelFlightInfoResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult.class, org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + status_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CancelFlightInfoResult_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult build() { + org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult result = buildPartial(); + if 
(!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult buildPartial() { + org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult result = new org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.status_ = status_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult other) { + if (other == org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult.getDefaultInstance()) return this; + if (other.status_ != 0) { + setStatusValue(other.getStatusValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + status_ = input.readEnum(); + bitField0_ |= 0x00000001; + break; + } // case 8 + default: { + if (!super.parseUnknownField(input, extensionRegistry, 
tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int status_ = 0; + /** + * .arrow.flight.protocol.CancelStatus status = 1; + * @return The enum numeric value on the wire for status. + */ + @java.lang.Override public int getStatusValue() { + return status_; + } + /** + * .arrow.flight.protocol.CancelStatus status = 1; + * @param value The enum numeric value on the wire for status to set. + * @return This builder for chaining. + */ + public Builder setStatusValue(int value) { + status_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.CancelStatus status = 1; + * @return The status. + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CancelStatus getStatus() { + org.apache.arrow.flight.impl.Flight.CancelStatus result = org.apache.arrow.flight.impl.Flight.CancelStatus.forNumber(status_); + return result == null ? org.apache.arrow.flight.impl.Flight.CancelStatus.UNRECOGNIZED : result; + } + /** + * .arrow.flight.protocol.CancelStatus status = 1; + * @param value The status to set. + * @return This builder for chaining. + */ + public Builder setStatus(org.apache.arrow.flight.impl.Flight.CancelStatus value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + status_ = value.getNumber(); + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.CancelStatus status = 1; + * @return This builder for chaining. 
+ */ + public Builder clearStatus() { + bitField0_ = (bitField0_ & ~0x00000001); + status_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.CancelFlightInfoResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.CancelFlightInfoResult) + private static final org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult(); + } + + public static org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CancelFlightInfoResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + 
} + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CancelFlightInfoResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface SchemaResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.SchemaResult) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The schema of the dataset in its IPC form:
+     *   4 bytes - an optional IPC_CONTINUATION_TOKEN prefix
+     *   4 bytes - the byte length of the payload
+     *   a flatbuffer Message whose header is the Schema
+     * 
+ * + * bytes schema = 1; + * @return The schema. + */ + com.google.protobuf.ByteString getSchema(); + } + /** + *
+   *
+   * Wrap the result of a getSchema call
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.SchemaResult} + */ + public static final class SchemaResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.SchemaResult) + SchemaResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use SchemaResult.newBuilder() to construct. + private SchemaResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private SchemaResult() { + schema_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new SchemaResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SchemaResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SchemaResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SchemaResult.class, org.apache.arrow.flight.impl.Flight.SchemaResult.Builder.class); + } + + public static final int SCHEMA_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString schema_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * The schema of the dataset in its IPC form:
+     *   4 bytes - an optional IPC_CONTINUATION_TOKEN prefix
+     *   4 bytes - the byte length of the payload
+     *   a flatbuffer Message whose header is the Schema
+     * 
+ * + * bytes schema = 1; + * @return The schema. + */ + @java.lang.Override + public com.google.protobuf.ByteString getSchema() { + return schema_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!schema_.isEmpty()) { + output.writeBytes(1, schema_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!schema_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, schema_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.SchemaResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.SchemaResult other = (org.apache.arrow.flight.impl.Flight.SchemaResult) obj; + + if (!getSchema() + .equals(other.getSchema())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + SCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getSchema().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseFrom( + java.nio.ByteBuffer data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseDelimitedFrom(java.io.InputStream input) + 
throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.SchemaResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.SchemaResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Wrap the result of a getSchema call
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.SchemaResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.SchemaResult) + org.apache.arrow.flight.impl.Flight.SchemaResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SchemaResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SchemaResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SchemaResult.class, org.apache.arrow.flight.impl.Flight.SchemaResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.SchemaResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + schema_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SchemaResult_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SchemaResult getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.SchemaResult.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SchemaResult build() { + org.apache.arrow.flight.impl.Flight.SchemaResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + 
@java.lang.Override + public org.apache.arrow.flight.impl.Flight.SchemaResult buildPartial() { + org.apache.arrow.flight.impl.Flight.SchemaResult result = new org.apache.arrow.flight.impl.Flight.SchemaResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.SchemaResult result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.schema_ = schema_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.SchemaResult) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.SchemaResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.SchemaResult other) { + if (other == org.apache.arrow.flight.impl.Flight.SchemaResult.getDefaultInstance()) return this; + if (other.getSchema() != com.google.protobuf.ByteString.EMPTY) { + setSchema(other.getSchema()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + schema_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString schema_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * The schema of the dataset in its IPC form:
+       *   4 bytes - an optional IPC_CONTINUATION_TOKEN prefix
+       *   4 bytes - the byte length of the payload
+       *   a flatbuffer Message whose header is the Schema
+       * 
+ * + * bytes schema = 1; + * @return The schema. + */ + @java.lang.Override + public com.google.protobuf.ByteString getSchema() { + return schema_; + } + /** + *
+       * The schema of the dataset in its IPC form:
+       *   4 bytes - an optional IPC_CONTINUATION_TOKEN prefix
+       *   4 bytes - the byte length of the payload
+       *   a flatbuffer Message whose header is the Schema
+       * 
+ * + * bytes schema = 1; + * @param value The schema to set. + * @return This builder for chaining. + */ + public Builder setSchema(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + schema_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The schema of the dataset in its IPC form:
+       *   4 bytes - an optional IPC_CONTINUATION_TOKEN prefix
+       *   4 bytes - the byte length of the payload
+       *   a flatbuffer Message whose header is the Schema
+       * 
+ * + * bytes schema = 1; + * @return This builder for chaining. + */ + public Builder clearSchema() { + bitField0_ = (bitField0_ & ~0x00000001); + schema_ = getDefaultInstance().getSchema(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.SchemaResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.SchemaResult) + private static final org.apache.arrow.flight.impl.Flight.SchemaResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.SchemaResult(); + } + + public static org.apache.arrow.flight.impl.Flight.SchemaResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public SchemaResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SchemaResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface FlightDescriptorOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.FlightDescriptor) + com.google.protobuf.MessageOrBuilder { + + /** + * .arrow.flight.protocol.FlightDescriptor.DescriptorType type = 1; + * @return The enum numeric value on the wire for type. + */ + int getTypeValue(); + /** + * .arrow.flight.protocol.FlightDescriptor.DescriptorType type = 1; + * @return The type. + */ + org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType getType(); + + /** + *
+     *
+     * Opaque value used to express a command. Should only be defined when
+     * type = CMD.
+     * 
+ * + * bytes cmd = 2; + * @return The cmd. + */ + com.google.protobuf.ByteString getCmd(); + + /** + *
+     *
+     * List of strings identifying a particular dataset. Should only be defined
+     * when type = PATH.
+     * 
+ * + * repeated string path = 3; + * @return A list containing the path. + */ + java.util.List + getPathList(); + /** + *
+     *
+     * List of strings identifying a particular dataset. Should only be defined
+     * when type = PATH.
+     * 
+ * + * repeated string path = 3; + * @return The count of path. + */ + int getPathCount(); + /** + *
+     *
+     * List of strings identifying a particular dataset. Should only be defined
+     * when type = PATH.
+     * 
+ * + * repeated string path = 3; + * @param index The index of the element to return. + * @return The path at the given index. + */ + java.lang.String getPath(int index); + /** + *
+     *
+     * List of strings identifying a particular dataset. Should only be defined
+     * when type = PATH.
+     * 
+ * + * repeated string path = 3; + * @param index The index of the value to return. + * @return The bytes of the path at the given index. + */ + com.google.protobuf.ByteString + getPathBytes(int index); + } + /** + *
+   *
+   * The name or tag for a Flight. May be used as a way to retrieve or generate
+   * a flight or be used to expose a set of previously defined flights.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.FlightDescriptor} + */ + public static final class FlightDescriptor extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.FlightDescriptor) + FlightDescriptorOrBuilder { + private static final long serialVersionUID = 0L; + // Use FlightDescriptor.newBuilder() to construct. + private FlightDescriptor(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private FlightDescriptor() { + type_ = 0; + cmd_ = com.google.protobuf.ByteString.EMPTY; + path_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new FlightDescriptor(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightDescriptor_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightDescriptor_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.FlightDescriptor.class, org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder.class); + } + + /** + *
+     *
+     * Describes what type of descriptor is defined.
+     * 
+ * + * Protobuf enum {@code arrow.flight.protocol.FlightDescriptor.DescriptorType} + */ + public enum DescriptorType + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+       * Protobuf pattern, not used.
+       * 
+ * + * UNKNOWN = 0; + */ + UNKNOWN(0), + /** + *
+       *
+       * A named path that identifies a dataset. A path is composed of a string
+       * or list of strings describing a particular dataset. This is conceptually
+       *  similar to a path inside a filesystem.
+       * 
+ * + * PATH = 1; + */ + PATH(1), + /** + *
+       *
+       * An opaque command to generate a dataset.
+       * 
+ * + * CMD = 2; + */ + CMD(2), + UNRECOGNIZED(-1), + ; + + /** + *
+       * Protobuf pattern, not used.
+       * 
+ * + * UNKNOWN = 0; + */ + public static final int UNKNOWN_VALUE = 0; + /** + *
+       *
+       * A named path that identifies a dataset. A path is composed of a string
+       * or list of strings describing a particular dataset. This is conceptually
+       *  similar to a path inside a filesystem.
+       * 
+ * + * PATH = 1; + */ + public static final int PATH_VALUE = 1; + /** + *
+       *
+       * An opaque command to generate a dataset.
+       * 
+ * + * CMD = 2; + */ + public static final int CMD_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static DescriptorType valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static DescriptorType forNumber(int value) { + switch (value) { + case 0: return UNKNOWN; + case 1: return PATH; + case 2: return CMD; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + DescriptorType> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public DescriptorType findValueByNumber(int number) { + return DescriptorType.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDescriptor().getEnumTypes().get(0); + } + + private static final DescriptorType[] VALUES = values(); + + public static DescriptorType valueOf( + 
com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private DescriptorType(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.FlightDescriptor.DescriptorType) + } + + public static final int TYPE_FIELD_NUMBER = 1; + private int type_ = 0; + /** + * .arrow.flight.protocol.FlightDescriptor.DescriptorType type = 1; + * @return The enum numeric value on the wire for type. + */ + @java.lang.Override public int getTypeValue() { + return type_; + } + /** + * .arrow.flight.protocol.FlightDescriptor.DescriptorType type = 1; + * @return The type. + */ + @java.lang.Override public org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType getType() { + org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType result = org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType.forNumber(type_); + return result == null ? org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType.UNRECOGNIZED : result; + } + + public static final int CMD_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString cmd_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     *
+     * Opaque value used to express a command. Should only be defined when
+     * type = CMD.
+     * 
+ * + * bytes cmd = 2; + * @return The cmd. + */ + @java.lang.Override + public com.google.protobuf.ByteString getCmd() { + return cmd_; + } + + public static final int PATH_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private com.google.protobuf.LazyStringArrayList path_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + /** + *
+     *
+     * List of strings identifying a particular dataset. Should only be defined
+     * when type = PATH.
+     * 
+ * + * repeated string path = 3; + * @return A list containing the path. + */ + public com.google.protobuf.ProtocolStringList + getPathList() { + return path_; + } + /** + *
+     *
+     * List of strings identifying a particular dataset. Should only be defined
+     * when type = PATH.
+     * 
+ * + * repeated string path = 3; + * @return The count of path. + */ + public int getPathCount() { + return path_.size(); + } + /** + *
+     *
+     * List of strings identifying a particular dataset. Should only be defined
+     * when type = PATH.
+     * 
+ * + * repeated string path = 3; + * @param index The index of the element to return. + * @return The path at the given index. + */ + public java.lang.String getPath(int index) { + return path_.get(index); + } + /** + *
+     *
+     * List of strings identifying a particular dataset. Should only be defined
+     * when type = PATH.
+     * 
+ * + * repeated string path = 3; + * @param index The index of the value to return. + * @return The bytes of the path at the given index. + */ + public com.google.protobuf.ByteString + getPathBytes(int index) { + return path_.getByteString(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (type_ != org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType.UNKNOWN.getNumber()) { + output.writeEnum(1, type_); + } + if (!cmd_.isEmpty()) { + output.writeBytes(2, cmd_); + } + for (int i = 0; i < path_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, path_.getRaw(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (type_ != org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType.UNKNOWN.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, type_); + } + if (!cmd_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, cmd_); + } + { + int dataSize = 0; + for (int i = 0; i < path_.size(); i++) { + dataSize += computeStringSizeNoTag(path_.getRaw(i)); + } + size += dataSize; + size += 1 * getPathList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.FlightDescriptor)) { + return super.equals(obj); + } + 
org.apache.arrow.flight.impl.Flight.FlightDescriptor other = (org.apache.arrow.flight.impl.Flight.FlightDescriptor) obj; + + if (type_ != other.type_) return false; + if (!getCmd() + .equals(other.getCmd())) return false; + if (!getPathList() + .equals(other.getPathList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + type_; + hash = (37 * hash) + CMD_FIELD_NUMBER; + hash = (53 * hash) + getCmd().hashCode(); + if (getPathCount() > 0) { + hash = (37 * hash) + PATH_FIELD_NUMBER; + hash = (53 * hash) + getPathList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.arrow.flight.impl.Flight.FlightDescriptor parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.FlightDescriptor prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * The name or tag for a Flight. May be used as a way to retrieve or generate
+     * a flight or be used to expose a set of previously defined flights.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.FlightDescriptor} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.FlightDescriptor) + org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightDescriptor_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightDescriptor_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.FlightDescriptor.class, org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.FlightDescriptor.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + type_ = 0; + cmd_ = com.google.protobuf.ByteString.EMPTY; + path_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightDescriptor_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightDescriptor getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightDescriptor build() { + org.apache.arrow.flight.impl.Flight.FlightDescriptor result = 
buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightDescriptor buildPartial() { + org.apache.arrow.flight.impl.Flight.FlightDescriptor result = new org.apache.arrow.flight.impl.Flight.FlightDescriptor(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.FlightDescriptor result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.type_ = type_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.cmd_ = cmd_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + path_.makeImmutable(); + result.path_ = path_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.FlightDescriptor) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.FlightDescriptor)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.FlightDescriptor other) { + if (other == org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance()) return this; + if (other.type_ != 0) { + setTypeValue(other.getTypeValue()); + } + if (other.getCmd() != com.google.protobuf.ByteString.EMPTY) { + setCmd(other.getCmd()); + } + if (!other.path_.isEmpty()) { + if (path_.isEmpty()) { + path_ = other.path_; + bitField0_ |= 0x00000004; + } else { + ensurePathIsMutable(); + path_.addAll(other.path_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + type_ = input.readEnum(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 18: { + cmd_ = input.readBytes(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + ensurePathIsMutable(); + path_.add(s); + break; + } // case 26 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int type_ = 0; + /** + * .arrow.flight.protocol.FlightDescriptor.DescriptorType type = 1; + * @return The enum numeric value on the wire for type. + */ + @java.lang.Override public int getTypeValue() { + return type_; + } + /** + * .arrow.flight.protocol.FlightDescriptor.DescriptorType type = 1; + * @param value The enum numeric value on the wire for type to set. + * @return This builder for chaining. + */ + public Builder setTypeValue(int value) { + type_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.FlightDescriptor.DescriptorType type = 1; + * @return The type. + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType getType() { + org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType result = org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType.forNumber(type_); + return result == null ? 
org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType.UNRECOGNIZED : result; + } + /** + * .arrow.flight.protocol.FlightDescriptor.DescriptorType type = 1; + * @param value The type to set. + * @return This builder for chaining. + */ + public Builder setType(org.apache.arrow.flight.impl.Flight.FlightDescriptor.DescriptorType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + type_ = value.getNumber(); + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.FlightDescriptor.DescriptorType type = 1; + * @return This builder for chaining. + */ + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000001); + type_ = 0; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString cmd_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       *
+       * Opaque value used to express a command. Should only be defined when
+       * type = CMD.
+       * 
+ * + * bytes cmd = 2; + * @return The cmd. + */ + @java.lang.Override + public com.google.protobuf.ByteString getCmd() { + return cmd_; + } + /** + *
+       *
+       * Opaque value used to express a command. Should only be defined when
+       * type = CMD.
+       * 
+ * + * bytes cmd = 2; + * @param value The cmd to set. + * @return This builder for chaining. + */ + public Builder setCmd(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + cmd_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * Opaque value used to express a command. Should only be defined when
+       * type = CMD.
+       * 
+ * + * bytes cmd = 2; + * @return This builder for chaining. + */ + public Builder clearCmd() { + bitField0_ = (bitField0_ & ~0x00000002); + cmd_ = getDefaultInstance().getCmd(); + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringArrayList path_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + private void ensurePathIsMutable() { + if (!path_.isModifiable()) { + path_ = new com.google.protobuf.LazyStringArrayList(path_); + } + bitField0_ |= 0x00000004; + } + /** + *
+       *
+       * List of strings identifying a particular dataset. Should only be defined
+       * when type = PATH.
+       * 
+ * + * repeated string path = 3; + * @return A list containing the path. + */ + public com.google.protobuf.ProtocolStringList + getPathList() { + path_.makeImmutable(); + return path_; + } + /** + *
+       *
+       * List of strings identifying a particular dataset. Should only be defined
+       * when type = PATH.
+       * 
+ * + * repeated string path = 3; + * @return The count of path. + */ + public int getPathCount() { + return path_.size(); + } + /** + *
+       *
+       * List of strings identifying a particular dataset. Should only be defined
+       * when type = PATH.
+       * 
+ * + * repeated string path = 3; + * @param index The index of the element to return. + * @return The path at the given index. + */ + public java.lang.String getPath(int index) { + return path_.get(index); + } + /** + *
+       *
+       * List of strings identifying a particular dataset. Should only be defined
+       * when type = PATH.
+       * 
+ * + * repeated string path = 3; + * @param index The index of the value to return. + * @return The bytes of the path at the given index. + */ + public com.google.protobuf.ByteString + getPathBytes(int index) { + return path_.getByteString(index); + } + /** + *
+       *
+       * List of strings identifying a particular dataset. Should only be defined
+       * when type = PATH.
+       * 
+ * + * repeated string path = 3; + * @param index The index to set the value at. + * @param value The path to set. + * @return This builder for chaining. + */ + public Builder setPath( + int index, java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + ensurePathIsMutable(); + path_.set(index, value); + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       *
+       * List of strings identifying a particular dataset. Should only be defined
+       * when type = PATH.
+       * 
+ * + * repeated string path = 3; + * @param value The path to add. + * @return This builder for chaining. + */ + public Builder addPath( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + ensurePathIsMutable(); + path_.add(value); + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       *
+       * List of strings identifying a particular dataset. Should only be defined
+       * when type = PATH.
+       * 
+ * + * repeated string path = 3; + * @param values The path to add. + * @return This builder for chaining. + */ + public Builder addAllPath( + java.lang.Iterable values) { + ensurePathIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, path_); + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       *
+       * List of strings identifying a particular dataset. Should only be defined
+       * when type = PATH.
+       * 
+ * + * repeated string path = 3; + * @return This builder for chaining. + */ + public Builder clearPath() { + path_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004);; + onChanged(); + return this; + } + /** + *
+       *
+       * List of strings identifying a particular dataset. Should only be defined
+       * when type = PATH.
+       * 
+ * + * repeated string path = 3; + * @param value The bytes of the path to add. + * @return This builder for chaining. + */ + public Builder addPathBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + ensurePathIsMutable(); + path_.add(value); + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.FlightDescriptor) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.FlightDescriptor) + private static final org.apache.arrow.flight.impl.Flight.FlightDescriptor DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.FlightDescriptor(); + } + + public static org.apache.arrow.flight.impl.Flight.FlightDescriptor getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public FlightDescriptor parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + 
throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightDescriptor getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface FlightInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.FlightInfo) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The schema of the dataset in its IPC form:
+     *   4 bytes - an optional IPC_CONTINUATION_TOKEN prefix
+     *   4 bytes - the byte length of the payload
+     *   a flatbuffer Message whose header is the Schema
+     * 
+ * + * bytes schema = 1; + * @return The schema. + */ + com.google.protobuf.ByteString getSchema(); + + /** + *
+     *
+     * The descriptor associated with this info.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return Whether the flightDescriptor field is set. + */ + boolean hasFlightDescriptor(); + /** + *
+     *
+     * The descriptor associated with this info.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return The flightDescriptor. + */ + org.apache.arrow.flight.impl.Flight.FlightDescriptor getFlightDescriptor(); + /** + *
+     *
+     * The descriptor associated with this info.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder getFlightDescriptorOrBuilder(); + + /** + *
+     *
+     * A list of endpoints associated with the flight. To consume the
+     * whole flight, all endpoints (and hence all Tickets) must be
+     * consumed. Endpoints can be consumed in any order.
+     *
+     * In other words, an application can use multiple endpoints to
+     * represent partitioned data.
+     *
+     * If the returned data has an ordering, an application can use
+     * "FlightInfo.ordered = true" or should return the all data in a
+     * single endpoint. Otherwise, there is no ordering defined on
+     * endpoints or the data within.
+     *
+     * A client can read ordered data by reading data from returned
+     * endpoints, in order, from front to back.
+     *
+     * Note that a client may ignore "FlightInfo.ordered = true". If an
+     * ordering is important for an application, an application must
+     * choose one of them:
+     *
+     * * An application requires that all clients must read data in
+     *   returned endpoints order.
+     * * An application must return the all data in a single endpoint.
+     * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + java.util.List + getEndpointList(); + /** + *
+     *
+     * A list of endpoints associated with the flight. To consume the
+     * whole flight, all endpoints (and hence all Tickets) must be
+     * consumed. Endpoints can be consumed in any order.
+     *
+     * In other words, an application can use multiple endpoints to
+     * represent partitioned data.
+     *
+     * If the returned data has an ordering, an application can use
+     * "FlightInfo.ordered = true" or should return the all data in a
+     * single endpoint. Otherwise, there is no ordering defined on
+     * endpoints or the data within.
+     *
+     * A client can read ordered data by reading data from returned
+     * endpoints, in order, from front to back.
+     *
+     * Note that a client may ignore "FlightInfo.ordered = true". If an
+     * ordering is important for an application, an application must
+     * choose one of them:
+     *
+     * * An application requires that all clients must read data in
+     *   returned endpoints order.
+     * * An application must return the all data in a single endpoint.
+     * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + org.apache.arrow.flight.impl.Flight.FlightEndpoint getEndpoint(int index); + /** + *
+     *
+     * A list of endpoints associated with the flight. To consume the
+     * whole flight, all endpoints (and hence all Tickets) must be
+     * consumed. Endpoints can be consumed in any order.
+     *
+     * In other words, an application can use multiple endpoints to
+     * represent partitioned data.
+     *
+     * If the returned data has an ordering, an application can use
+     * "FlightInfo.ordered = true" or should return the all data in a
+     * single endpoint. Otherwise, there is no ordering defined on
+     * endpoints or the data within.
+     *
+     * A client can read ordered data by reading data from returned
+     * endpoints, in order, from front to back.
+     *
+     * Note that a client may ignore "FlightInfo.ordered = true". If an
+     * ordering is important for an application, an application must
+     * choose one of them:
+     *
+     * * An application requires that all clients must read data in
+     *   returned endpoints order.
+     * * An application must return the all data in a single endpoint.
+     * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + int getEndpointCount(); + /** + *
+     *
+     * A list of endpoints associated with the flight. To consume the
+     * whole flight, all endpoints (and hence all Tickets) must be
+     * consumed. Endpoints can be consumed in any order.
+     *
+     * In other words, an application can use multiple endpoints to
+     * represent partitioned data.
+     *
+     * If the returned data has an ordering, an application can use
+     * "FlightInfo.ordered = true" or should return the all data in a
+     * single endpoint. Otherwise, there is no ordering defined on
+     * endpoints or the data within.
+     *
+     * A client can read ordered data by reading data from returned
+     * endpoints, in order, from front to back.
+     *
+     * Note that a client may ignore "FlightInfo.ordered = true". If an
+     * ordering is important for an application, an application must
+     * choose one of them:
+     *
+     * * An application requires that all clients must read data in
+     *   returned endpoints order.
+     * * An application must return the all data in a single endpoint.
+     * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + java.util.List + getEndpointOrBuilderList(); + /** + *
+     *
+     * A list of endpoints associated with the flight. To consume the
+     * whole flight, all endpoints (and hence all Tickets) must be
+     * consumed. Endpoints can be consumed in any order.
+     *
+     * In other words, an application can use multiple endpoints to
+     * represent partitioned data.
+     *
+     * If the returned data has an ordering, an application can use
+     * "FlightInfo.ordered = true" or should return the all data in a
+     * single endpoint. Otherwise, there is no ordering defined on
+     * endpoints or the data within.
+     *
+     * A client can read ordered data by reading data from returned
+     * endpoints, in order, from front to back.
+     *
+     * Note that a client may ignore "FlightInfo.ordered = true". If an
+     * ordering is important for an application, an application must
+     * choose one of them:
+     *
+     * * An application requires that all clients must read data in
+     *   returned endpoints order.
+     * * An application must return the all data in a single endpoint.
+     * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder getEndpointOrBuilder( + int index); + + /** + *
+     * Set these to -1 if unknown.
+     * 
+ * + * int64 total_records = 4; + * @return The totalRecords. + */ + long getTotalRecords(); + + /** + * int64 total_bytes = 5; + * @return The totalBytes. + */ + long getTotalBytes(); + + /** + *
+     *
+     * FlightEndpoints are in the same order as the data.
+     * 
+ * + * bool ordered = 6; + * @return The ordered. + */ + boolean getOrdered(); + + /** + *
+     *
+     * Application-defined metadata.
+     *
+     * There is no inherent or required relationship between this
+     * and the app_metadata fields in the FlightEndpoints or resulting
+     * FlightData messages. Since this metadata is application-defined,
+     * a given application could define there to be a relationship,
+     * but there is none required by the spec.
+     * 
+ * + * bytes app_metadata = 7; + * @return The appMetadata. + */ + com.google.protobuf.ByteString getAppMetadata(); + } + /** + *
+   *
+   * The access coordinates for retrieval of a dataset. With a FlightInfo, a
+   * consumer is able to determine how to retrieve a dataset.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.FlightInfo} + */ + public static final class FlightInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.FlightInfo) + FlightInfoOrBuilder { + private static final long serialVersionUID = 0L; + // Use FlightInfo.newBuilder() to construct. + private FlightInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private FlightInfo() { + schema_ = com.google.protobuf.ByteString.EMPTY; + endpoint_ = java.util.Collections.emptyList(); + appMetadata_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new FlightInfo(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.FlightInfo.class, org.apache.arrow.flight.impl.Flight.FlightInfo.Builder.class); + } + + public static final int SCHEMA_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString schema_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * The schema of the dataset in its IPC form:
+     *   4 bytes - an optional IPC_CONTINUATION_TOKEN prefix
+     *   4 bytes - the byte length of the payload
+     *   a flatbuffer Message whose header is the Schema
+     * 
+ * + * bytes schema = 1; + * @return The schema. + */ + @java.lang.Override + public com.google.protobuf.ByteString getSchema() { + return schema_; + } + + public static final int FLIGHT_DESCRIPTOR_FIELD_NUMBER = 2; + private org.apache.arrow.flight.impl.Flight.FlightDescriptor flightDescriptor_; + /** + *
+     *
+     * The descriptor associated with this info.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return Whether the flightDescriptor field is set. + */ + @java.lang.Override + public boolean hasFlightDescriptor() { + return flightDescriptor_ != null; + } + /** + *
+     *
+     * The descriptor associated with this info.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return The flightDescriptor. + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightDescriptor getFlightDescriptor() { + return flightDescriptor_ == null ? org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } + /** + *
+     *
+     * The descriptor associated with this info.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder getFlightDescriptorOrBuilder() { + return flightDescriptor_ == null ? org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } + + public static final int ENDPOINT_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private java.util.List endpoint_; + /** + *
+     *
+     * A list of endpoints associated with the flight. To consume the
+     * whole flight, all endpoints (and hence all Tickets) must be
+     * consumed. Endpoints can be consumed in any order.
+     *
+     * In other words, an application can use multiple endpoints to
+     * represent partitioned data.
+     *
+     * If the returned data has an ordering, an application can use
+     * "FlightInfo.ordered = true" or should return the all data in a
+     * single endpoint. Otherwise, there is no ordering defined on
+     * endpoints or the data within.
+     *
+     * A client can read ordered data by reading data from returned
+     * endpoints, in order, from front to back.
+     *
+     * Note that a client may ignore "FlightInfo.ordered = true". If an
+     * ordering is important for an application, an application must
+     * choose one of them:
+     *
+     * * An application requires that all clients must read data in
+     *   returned endpoints order.
+     * * An application must return the all data in a single endpoint.
+     * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + @java.lang.Override + public java.util.List getEndpointList() { + return endpoint_; + } + /** + *
+     *
+     * A list of endpoints associated with the flight. To consume the
+     * whole flight, all endpoints (and hence all Tickets) must be
+     * consumed. Endpoints can be consumed in any order.
+     *
+     * In other words, an application can use multiple endpoints to
+     * represent partitioned data.
+     *
+     * If the returned data has an ordering, an application can use
+     * "FlightInfo.ordered = true" or should return the all data in a
+     * single endpoint. Otherwise, there is no ordering defined on
+     * endpoints or the data within.
+     *
+     * A client can read ordered data by reading data from returned
+     * endpoints, in order, from front to back.
+     *
+     * Note that a client may ignore "FlightInfo.ordered = true". If an
+     * ordering is important for an application, an application must
+     * choose one of them:
+     *
+     * * An application requires that all clients must read data in
+     *   returned endpoints order.
+     * * An application must return the all data in a single endpoint.
+     * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + @java.lang.Override + public java.util.List + getEndpointOrBuilderList() { + return endpoint_; + } + /** + *
+     *
+     * A list of endpoints associated with the flight. To consume the
+     * whole flight, all endpoints (and hence all Tickets) must be
+     * consumed. Endpoints can be consumed in any order.
+     *
+     * In other words, an application can use multiple endpoints to
+     * represent partitioned data.
+     *
+     * If the returned data has an ordering, an application can use
+     * "FlightInfo.ordered = true" or should return the all data in a
+     * single endpoint. Otherwise, there is no ordering defined on
+     * endpoints or the data within.
+     *
+     * A client can read ordered data by reading data from returned
+     * endpoints, in order, from front to back.
+     *
+     * Note that a client may ignore "FlightInfo.ordered = true". If an
+     * ordering is important for an application, an application must
+     * choose one of them:
+     *
+     * * An application requires that all clients must read data in
+     *   returned endpoints order.
+     * * An application must return the all data in a single endpoint.
+     * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + @java.lang.Override + public int getEndpointCount() { + return endpoint_.size(); + } + /** + *
+     *
+     * A list of endpoints associated with the flight. To consume the
+     * whole flight, all endpoints (and hence all Tickets) must be
+     * consumed. Endpoints can be consumed in any order.
+     *
+     * In other words, an application can use multiple endpoints to
+     * represent partitioned data.
+     *
+     * If the returned data has an ordering, an application can use
+     * "FlightInfo.ordered = true" or should return the all data in a
+     * single endpoint. Otherwise, there is no ordering defined on
+     * endpoints or the data within.
+     *
+     * A client can read ordered data by reading data from returned
+     * endpoints, in order, from front to back.
+     *
+     * Note that a client may ignore "FlightInfo.ordered = true". If an
+     * ordering is important for an application, an application must
+     * choose one of them:
+     *
+     * * An application requires that all clients must read data in
+     *   returned endpoints order.
+     * * An application must return the all data in a single endpoint.
+     * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightEndpoint getEndpoint(int index) { + return endpoint_.get(index); + } + /** + *
+     *
+     * A list of endpoints associated with the flight. To consume the
+     * whole flight, all endpoints (and hence all Tickets) must be
+     * consumed. Endpoints can be consumed in any order.
+     *
+     * In other words, an application can use multiple endpoints to
+     * represent partitioned data.
+     *
+     * If the returned data has an ordering, an application can use
+     * "FlightInfo.ordered = true" or should return the all data in a
+     * single endpoint. Otherwise, there is no ordering defined on
+     * endpoints or the data within.
+     *
+     * A client can read ordered data by reading data from returned
+     * endpoints, in order, from front to back.
+     *
+     * Note that a client may ignore "FlightInfo.ordered = true". If an
+     * ordering is important for an application, an application must
+     * choose one of them:
+     *
+     * * An application requires that all clients must read data in
+     *   returned endpoints order.
+     * * An application must return the all data in a single endpoint.
+     * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder getEndpointOrBuilder( + int index) { + return endpoint_.get(index); + } + + public static final int TOTAL_RECORDS_FIELD_NUMBER = 4; + private long totalRecords_ = 0L; + /** + *
+     * Set these to -1 if unknown.
+     * 
+ * + * int64 total_records = 4; + * @return The totalRecords. + */ + @java.lang.Override + public long getTotalRecords() { + return totalRecords_; + } + + public static final int TOTAL_BYTES_FIELD_NUMBER = 5; + private long totalBytes_ = 0L; + /** + * int64 total_bytes = 5; + * @return The totalBytes. + */ + @java.lang.Override + public long getTotalBytes() { + return totalBytes_; + } + + public static final int ORDERED_FIELD_NUMBER = 6; + private boolean ordered_ = false; + /** + *
+     *
+     * FlightEndpoints are in the same order as the data.
+     * 
+ * + * bool ordered = 6; + * @return The ordered. + */ + @java.lang.Override + public boolean getOrdered() { + return ordered_; + } + + public static final int APP_METADATA_FIELD_NUMBER = 7; + private com.google.protobuf.ByteString appMetadata_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     *
+     * Application-defined metadata.
+     *
+     * There is no inherent or required relationship between this
+     * and the app_metadata fields in the FlightEndpoints or resulting
+     * FlightData messages. Since this metadata is application-defined,
+     * a given application could define there to be a relationship,
+     * but there is none required by the spec.
+     * 
+ * + * bytes app_metadata = 7; + * @return The appMetadata. + */ + @java.lang.Override + public com.google.protobuf.ByteString getAppMetadata() { + return appMetadata_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!schema_.isEmpty()) { + output.writeBytes(1, schema_); + } + if (flightDescriptor_ != null) { + output.writeMessage(2, getFlightDescriptor()); + } + for (int i = 0; i < endpoint_.size(); i++) { + output.writeMessage(3, endpoint_.get(i)); + } + if (totalRecords_ != 0L) { + output.writeInt64(4, totalRecords_); + } + if (totalBytes_ != 0L) { + output.writeInt64(5, totalBytes_); + } + if (ordered_ != false) { + output.writeBool(6, ordered_); + } + if (!appMetadata_.isEmpty()) { + output.writeBytes(7, appMetadata_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!schema_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, schema_); + } + if (flightDescriptor_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getFlightDescriptor()); + } + for (int i = 0; i < endpoint_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, endpoint_.get(i)); + } + if (totalRecords_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(4, totalRecords_); + } + if (totalBytes_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(5, totalBytes_); + } + if (ordered_ != false) { + size += com.google.protobuf.CodedOutputStream + 
.computeBoolSize(6, ordered_); + } + if (!appMetadata_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(7, appMetadata_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.FlightInfo)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.FlightInfo other = (org.apache.arrow.flight.impl.Flight.FlightInfo) obj; + + if (!getSchema() + .equals(other.getSchema())) return false; + if (hasFlightDescriptor() != other.hasFlightDescriptor()) return false; + if (hasFlightDescriptor()) { + if (!getFlightDescriptor() + .equals(other.getFlightDescriptor())) return false; + } + if (!getEndpointList() + .equals(other.getEndpointList())) return false; + if (getTotalRecords() + != other.getTotalRecords()) return false; + if (getTotalBytes() + != other.getTotalBytes()) return false; + if (getOrdered() + != other.getOrdered()) return false; + if (!getAppMetadata() + .equals(other.getAppMetadata())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + SCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getSchema().hashCode(); + if (hasFlightDescriptor()) { + hash = (37 * hash) + FLIGHT_DESCRIPTOR_FIELD_NUMBER; + hash = (53 * hash) + getFlightDescriptor().hashCode(); + } + if (getEndpointCount() > 0) { + hash = (37 * hash) + ENDPOINT_FIELD_NUMBER; + hash = (53 * hash) + getEndpointList().hashCode(); + } + hash = (37 * hash) + TOTAL_RECORDS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTotalRecords()); + hash = (37 * hash) + 
TOTAL_BYTES_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getTotalBytes()); + hash = (37 * hash) + ORDERED_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getOrdered()); + hash = (37 * hash) + APP_METADATA_FIELD_NUMBER; + hash = (53 * hash) + getAppMetadata().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseFrom(java.io.InputStream input) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.FlightInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.FlightInfo prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * The access coordinates for retrieval of a dataset. With a FlightInfo, a
+     * consumer is able to determine how to retrieve a dataset.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.FlightInfo} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.FlightInfo) + org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.FlightInfo.class, org.apache.arrow.flight.impl.Flight.FlightInfo.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.FlightInfo.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + schema_ = com.google.protobuf.ByteString.EMPTY; + flightDescriptor_ = null; + if (flightDescriptorBuilder_ != null) { + flightDescriptorBuilder_.dispose(); + flightDescriptorBuilder_ = null; + } + if (endpointBuilder_ == null) { + endpoint_ = java.util.Collections.emptyList(); + } else { + endpoint_ = null; + endpointBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + totalRecords_ = 0L; + totalBytes_ = 0L; + ordered_ = false; + appMetadata_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightInfo_descriptor; + } + + @java.lang.Override + public 
org.apache.arrow.flight.impl.Flight.FlightInfo getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightInfo build() { + org.apache.arrow.flight.impl.Flight.FlightInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightInfo buildPartial() { + org.apache.arrow.flight.impl.Flight.FlightInfo result = new org.apache.arrow.flight.impl.Flight.FlightInfo(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.flight.impl.Flight.FlightInfo result) { + if (endpointBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0)) { + endpoint_ = java.util.Collections.unmodifiableList(endpoint_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.endpoint_ = endpoint_; + } else { + result.endpoint_ = endpointBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.FlightInfo result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.schema_ = schema_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.flightDescriptor_ = flightDescriptorBuilder_ == null + ? 
flightDescriptor_ + : flightDescriptorBuilder_.build(); + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.totalRecords_ = totalRecords_; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.totalBytes_ = totalBytes_; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.ordered_ = ordered_; + } + if (((from_bitField0_ & 0x00000040) != 0)) { + result.appMetadata_ = appMetadata_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.FlightInfo) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.FlightInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.FlightInfo other) { + if (other == org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance()) return this; + if (other.getSchema() != com.google.protobuf.ByteString.EMPTY) { + setSchema(other.getSchema()); + } + if (other.hasFlightDescriptor()) { + mergeFlightDescriptor(other.getFlightDescriptor()); + } + if (endpointBuilder_ == null) { + if (!other.endpoint_.isEmpty()) { + if (endpoint_.isEmpty()) { + endpoint_ = other.endpoint_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureEndpointIsMutable(); + endpoint_.addAll(other.endpoint_); + } + onChanged(); + } + } else { + if (!other.endpoint_.isEmpty()) { + if (endpointBuilder_.isEmpty()) { + endpointBuilder_.dispose(); + endpointBuilder_ = null; + endpoint_ = other.endpoint_; + bitField0_ = (bitField0_ & ~0x00000004); + endpointBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getEndpointFieldBuilder() : null; + } else { + endpointBuilder_.addAllMessages(other.endpoint_); + } + } + } + if (other.getTotalRecords() != 0L) { + setTotalRecords(other.getTotalRecords()); + } + if (other.getTotalBytes() != 0L) { + setTotalBytes(other.getTotalBytes()); + } + if (other.getOrdered() != false) { + setOrdered(other.getOrdered()); + } + if (other.getAppMetadata() != com.google.protobuf.ByteString.EMPTY) { + setAppMetadata(other.getAppMetadata()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + schema_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + input.readMessage( + getFlightDescriptorFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + org.apache.arrow.flight.impl.Flight.FlightEndpoint m = + input.readMessage( + org.apache.arrow.flight.impl.Flight.FlightEndpoint.parser(), + extensionRegistry); + if (endpointBuilder_ == null) { + ensureEndpointIsMutable(); + endpoint_.add(m); + } else { + endpointBuilder_.addMessage(m); + } + break; + } // case 26 + case 32: { + totalRecords_ = input.readInt64(); + bitField0_ |= 0x00000008; + break; + } // case 32 + case 40: { + totalBytes_ = input.readInt64(); + bitField0_ |= 0x00000010; + break; + } // case 40 + case 48: { + ordered_ = input.readBool(); + bitField0_ |= 0x00000020; + break; + } // case 48 + case 58: { + appMetadata_ = input.readBytes(); + bitField0_ |= 
0x00000040; + break; + } // case 58 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString schema_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * The schema of the dataset in its IPC form:
+       *   4 bytes - an optional IPC_CONTINUATION_TOKEN prefix
+       *   4 bytes - the byte length of the payload
+       *   a flatbuffer Message whose header is the Schema
+       * 
+ * + * bytes schema = 1; + * @return The schema. + */ + @java.lang.Override + public com.google.protobuf.ByteString getSchema() { + return schema_; + } + /** + *
+       * The schema of the dataset in its IPC form:
+       *   4 bytes - an optional IPC_CONTINUATION_TOKEN prefix
+       *   4 bytes - the byte length of the payload
+       *   a flatbuffer Message whose header is the Schema
+       * 
+ * + * bytes schema = 1; + * @param value The schema to set. + * @return This builder for chaining. + */ + public Builder setSchema(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + schema_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The schema of the dataset in its IPC form:
+       *   4 bytes - an optional IPC_CONTINUATION_TOKEN prefix
+       *   4 bytes - the byte length of the payload
+       *   a flatbuffer Message whose header is the Schema
+       * 
+ * + * bytes schema = 1; + * @return This builder for chaining. + */ + public Builder clearSchema() { + bitField0_ = (bitField0_ & ~0x00000001); + schema_ = getDefaultInstance().getSchema(); + onChanged(); + return this; + } + + private org.apache.arrow.flight.impl.Flight.FlightDescriptor flightDescriptor_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder, org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder> flightDescriptorBuilder_; + /** + *
+       *
+       * The descriptor associated with this info.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return Whether the flightDescriptor field is set. + */ + public boolean hasFlightDescriptor() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       *
+       * The descriptor associated with this info.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return The flightDescriptor. + */ + public org.apache.arrow.flight.impl.Flight.FlightDescriptor getFlightDescriptor() { + if (flightDescriptorBuilder_ == null) { + return flightDescriptor_ == null ? org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } else { + return flightDescriptorBuilder_.getMessage(); + } + } + /** + *
+       *
+       * The descriptor associated with this info.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public Builder setFlightDescriptor(org.apache.arrow.flight.impl.Flight.FlightDescriptor value) { + if (flightDescriptorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + flightDescriptor_ = value; + } else { + flightDescriptorBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor associated with this info.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public Builder setFlightDescriptor( + org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder builderForValue) { + if (flightDescriptorBuilder_ == null) { + flightDescriptor_ = builderForValue.build(); + } else { + flightDescriptorBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor associated with this info.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public Builder mergeFlightDescriptor(org.apache.arrow.flight.impl.Flight.FlightDescriptor value) { + if (flightDescriptorBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) && + flightDescriptor_ != null && + flightDescriptor_ != org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance()) { + getFlightDescriptorBuilder().mergeFrom(value); + } else { + flightDescriptor_ = value; + } + } else { + flightDescriptorBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor associated with this info.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public Builder clearFlightDescriptor() { + bitField0_ = (bitField0_ & ~0x00000002); + flightDescriptor_ = null; + if (flightDescriptorBuilder_ != null) { + flightDescriptorBuilder_.dispose(); + flightDescriptorBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor associated with this info.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder getFlightDescriptorBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getFlightDescriptorFieldBuilder().getBuilder(); + } + /** + *
+       *
+       * The descriptor associated with this info.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder getFlightDescriptorOrBuilder() { + if (flightDescriptorBuilder_ != null) { + return flightDescriptorBuilder_.getMessageOrBuilder(); + } else { + return flightDescriptor_ == null ? + org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } + } + /** + *
+       *
+       * The descriptor associated with this info.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder, org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder> + getFlightDescriptorFieldBuilder() { + if (flightDescriptorBuilder_ == null) { + flightDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder, org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder>( + getFlightDescriptor(), + getParentForChildren(), + isClean()); + flightDescriptor_ = null; + } + return flightDescriptorBuilder_; + } + + private java.util.List endpoint_ = + java.util.Collections.emptyList(); + private void ensureEndpointIsMutable() { + if (!((bitField0_ & 0x00000004) != 0)) { + endpoint_ = new java.util.ArrayList(endpoint_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightEndpoint, org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder, org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder> endpointBuilder_; + + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public java.util.List getEndpointList() { + if (endpointBuilder_ == null) { + return java.util.Collections.unmodifiableList(endpoint_); + } else { + return endpointBuilder_.getMessageList(); + } + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public int getEndpointCount() { + if (endpointBuilder_ == null) { + return endpoint_.size(); + } else { + return endpointBuilder_.getCount(); + } + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public org.apache.arrow.flight.impl.Flight.FlightEndpoint getEndpoint(int index) { + if (endpointBuilder_ == null) { + return endpoint_.get(index); + } else { + return endpointBuilder_.getMessage(index); + } + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public Builder setEndpoint( + int index, org.apache.arrow.flight.impl.Flight.FlightEndpoint value) { + if (endpointBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureEndpointIsMutable(); + endpoint_.set(index, value); + onChanged(); + } else { + endpointBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public Builder setEndpoint( + int index, org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder builderForValue) { + if (endpointBuilder_ == null) { + ensureEndpointIsMutable(); + endpoint_.set(index, builderForValue.build()); + onChanged(); + } else { + endpointBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public Builder addEndpoint(org.apache.arrow.flight.impl.Flight.FlightEndpoint value) { + if (endpointBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureEndpointIsMutable(); + endpoint_.add(value); + onChanged(); + } else { + endpointBuilder_.addMessage(value); + } + return this; + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public Builder addEndpoint( + int index, org.apache.arrow.flight.impl.Flight.FlightEndpoint value) { + if (endpointBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureEndpointIsMutable(); + endpoint_.add(index, value); + onChanged(); + } else { + endpointBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public Builder addEndpoint( + org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder builderForValue) { + if (endpointBuilder_ == null) { + ensureEndpointIsMutable(); + endpoint_.add(builderForValue.build()); + onChanged(); + } else { + endpointBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public Builder addEndpoint( + int index, org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder builderForValue) { + if (endpointBuilder_ == null) { + ensureEndpointIsMutable(); + endpoint_.add(index, builderForValue.build()); + onChanged(); + } else { + endpointBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public Builder addAllEndpoint( + java.lang.Iterable values) { + if (endpointBuilder_ == null) { + ensureEndpointIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, endpoint_); + onChanged(); + } else { + endpointBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public Builder clearEndpoint() { + if (endpointBuilder_ == null) { + endpoint_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + endpointBuilder_.clear(); + } + return this; + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public Builder removeEndpoint(int index) { + if (endpointBuilder_ == null) { + ensureEndpointIsMutable(); + endpoint_.remove(index); + onChanged(); + } else { + endpointBuilder_.remove(index); + } + return this; + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder getEndpointBuilder( + int index) { + return getEndpointFieldBuilder().getBuilder(index); + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder getEndpointOrBuilder( + int index) { + if (endpointBuilder_ == null) { + return endpoint_.get(index); } else { + return endpointBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public java.util.List + getEndpointOrBuilderList() { + if (endpointBuilder_ != null) { + return endpointBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(endpoint_); + } + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder addEndpointBuilder() { + return getEndpointFieldBuilder().addBuilder( + org.apache.arrow.flight.impl.Flight.FlightEndpoint.getDefaultInstance()); + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder addEndpointBuilder( + int index) { + return getEndpointFieldBuilder().addBuilder( + index, org.apache.arrow.flight.impl.Flight.FlightEndpoint.getDefaultInstance()); + } + /** + *
+       *
+       * A list of endpoints associated with the flight. To consume the
+       * whole flight, all endpoints (and hence all Tickets) must be
+       * consumed. Endpoints can be consumed in any order.
+       *
+       * In other words, an application can use multiple endpoints to
+       * represent partitioned data.
+       *
+       * If the returned data has an ordering, an application can use
+       * "FlightInfo.ordered = true" or should return the all data in a
+       * single endpoint. Otherwise, there is no ordering defined on
+       * endpoints or the data within.
+       *
+       * A client can read ordered data by reading data from returned
+       * endpoints, in order, from front to back.
+       *
+       * Note that a client may ignore "FlightInfo.ordered = true". If an
+       * ordering is important for an application, an application must
+       * choose one of them:
+       *
+       * * An application requires that all clients must read data in
+       *   returned endpoints order.
+       * * An application must return the all data in a single endpoint.
+       * 
+ * + * repeated .arrow.flight.protocol.FlightEndpoint endpoint = 3; + */ + public java.util.List + getEndpointBuilderList() { + return getEndpointFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightEndpoint, org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder, org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder> + getEndpointFieldBuilder() { + if (endpointBuilder_ == null) { + endpointBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightEndpoint, org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder, org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder>( + endpoint_, + ((bitField0_ & 0x00000004) != 0), + getParentForChildren(), + isClean()); + endpoint_ = null; + } + return endpointBuilder_; + } + + private long totalRecords_ ; + /** + *
+       * Set these to -1 if unknown.
+       * 
+ * + * int64 total_records = 4; + * @return The totalRecords. + */ + @java.lang.Override + public long getTotalRecords() { + return totalRecords_; + } + /** + *
+       * Set these to -1 if unknown.
+       * 
+ * + * int64 total_records = 4; + * @param value The totalRecords to set. + * @return This builder for chaining. + */ + public Builder setTotalRecords(long value) { + + totalRecords_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       * Set these to -1 if unknown.
+       * 
+ * + * int64 total_records = 4; + * @return This builder for chaining. + */ + public Builder clearTotalRecords() { + bitField0_ = (bitField0_ & ~0x00000008); + totalRecords_ = 0L; + onChanged(); + return this; + } + + private long totalBytes_ ; + /** + * int64 total_bytes = 5; + * @return The totalBytes. + */ + @java.lang.Override + public long getTotalBytes() { + return totalBytes_; + } + /** + * int64 total_bytes = 5; + * @param value The totalBytes to set. + * @return This builder for chaining. + */ + public Builder setTotalBytes(long value) { + + totalBytes_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * int64 total_bytes = 5; + * @return This builder for chaining. + */ + public Builder clearTotalBytes() { + bitField0_ = (bitField0_ & ~0x00000010); + totalBytes_ = 0L; + onChanged(); + return this; + } + + private boolean ordered_ ; + /** + *
+       *
+       * FlightEndpoints are in the same order as the data.
+       * 
+ * + * bool ordered = 6; + * @return The ordered. + */ + @java.lang.Override + public boolean getOrdered() { + return ordered_; + } + /** + *
+       *
+       * FlightEndpoints are in the same order as the data.
+       * 
+ * + * bool ordered = 6; + * @param value The ordered to set. + * @return This builder for chaining. + */ + public Builder setOrdered(boolean value) { + + ordered_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + *
+       *
+       * FlightEndpoints are in the same order as the data.
+       * 
+ * + * bool ordered = 6; + * @return This builder for chaining. + */ + public Builder clearOrdered() { + bitField0_ = (bitField0_ & ~0x00000020); + ordered_ = false; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString appMetadata_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       *
+       * Application-defined metadata.
+       *
+       * There is no inherent or required relationship between this
+       * and the app_metadata fields in the FlightEndpoints or resulting
+       * FlightData messages. Since this metadata is application-defined,
+       * a given application could define there to be a relationship,
+       * but there is none required by the spec.
+       * 
+ * + * bytes app_metadata = 7; + * @return The appMetadata. + */ + @java.lang.Override + public com.google.protobuf.ByteString getAppMetadata() { + return appMetadata_; + } + /** + *
+       *
+       * Application-defined metadata.
+       *
+       * There is no inherent or required relationship between this
+       * and the app_metadata fields in the FlightEndpoints or resulting
+       * FlightData messages. Since this metadata is application-defined,
+       * a given application could define there to be a relationship,
+       * but there is none required by the spec.
+       * 
+ * + * bytes app_metadata = 7; + * @param value The appMetadata to set. + * @return This builder for chaining. + */ + public Builder setAppMetadata(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + appMetadata_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + *
+       *
+       * Application-defined metadata.
+       *
+       * There is no inherent or required relationship between this
+       * and the app_metadata fields in the FlightEndpoints or resulting
+       * FlightData messages. Since this metadata is application-defined,
+       * a given application could define there to be a relationship,
+       * but there is none required by the spec.
+       * 
+ * + * bytes app_metadata = 7; + * @return This builder for chaining. + */ + public Builder clearAppMetadata() { + bitField0_ = (bitField0_ & ~0x00000040); + appMetadata_ = getDefaultInstance().getAppMetadata(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.FlightInfo) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.FlightInfo) + private static final org.apache.arrow.flight.impl.Flight.FlightInfo DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.FlightInfo(); + } + + public static org.apache.arrow.flight.impl.Flight.FlightInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public FlightInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface PollInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.PollInfo) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * The currently available results.
+     *
+     * If "flight_descriptor" is not specified, the query is complete
+     * and "info" specifies all results. Otherwise, "info" contains
+     * partial query results.
+     *
+     * Note that each PollInfo response contains a complete
+     * FlightInfo (not just the delta between the previous and current
+     * FlightInfo).
+     *
+     * Subsequent PollInfo responses may only append new endpoints to
+     * info.
+     *
+     * Clients can begin fetching results via DoGet(Ticket) with the
+     * ticket in the info before the query is
+     * completed. FlightInfo.ordered is also valid.
+     * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + * @return Whether the info field is set. + */ + boolean hasInfo(); + /** + *
+     *
+     * The currently available results.
+     *
+     * If "flight_descriptor" is not specified, the query is complete
+     * and "info" specifies all results. Otherwise, "info" contains
+     * partial query results.
+     *
+     * Note that each PollInfo response contains a complete
+     * FlightInfo (not just the delta between the previous and current
+     * FlightInfo).
+     *
+     * Subsequent PollInfo responses may only append new endpoints to
+     * info.
+     *
+     * Clients can begin fetching results via DoGet(Ticket) with the
+     * ticket in the info before the query is
+     * completed. FlightInfo.ordered is also valid.
+     * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + * @return The info. + */ + org.apache.arrow.flight.impl.Flight.FlightInfo getInfo(); + /** + *
+     *
+     * The currently available results.
+     *
+     * If "flight_descriptor" is not specified, the query is complete
+     * and "info" specifies all results. Otherwise, "info" contains
+     * partial query results.
+     *
+     * Note that each PollInfo response contains a complete
+     * FlightInfo (not just the delta between the previous and current
+     * FlightInfo).
+     *
+     * Subsequent PollInfo responses may only append new endpoints to
+     * info.
+     *
+     * Clients can begin fetching results via DoGet(Ticket) with the
+     * ticket in the info before the query is
+     * completed. FlightInfo.ordered is also valid.
+     * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + */ + org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder getInfoOrBuilder(); + + /** + *
+     *
+     * The descriptor the client should use on the next try.
+     * If unset, the query is complete.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return Whether the flightDescriptor field is set. + */ + boolean hasFlightDescriptor(); + /** + *
+     *
+     * The descriptor the client should use on the next try.
+     * If unset, the query is complete.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return The flightDescriptor. + */ + org.apache.arrow.flight.impl.Flight.FlightDescriptor getFlightDescriptor(); + /** + *
+     *
+     * The descriptor the client should use on the next try.
+     * If unset, the query is complete.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder getFlightDescriptorOrBuilder(); + + /** + *
+     *
+     * Query progress. If known, must be in [0.0, 1.0] but need not be
+     * monotonic or nondecreasing. If unknown, do not set.
+     * 
+ * + * optional double progress = 3; + * @return Whether the progress field is set. + */ + boolean hasProgress(); + /** + *
+     *
+     * Query progress. If known, must be in [0.0, 1.0] but need not be
+     * monotonic or nondecreasing. If unknown, do not set.
+     * 
+ * + * optional double progress = 3; + * @return The progress. + */ + double getProgress(); + + /** + *
+     *
+     * Expiration time for this request. After this passes, the server
+     * might not accept the retry descriptor anymore (and the query may
+     * be cancelled). This may be updated on a call to PollFlightInfo.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + * @return Whether the expirationTime field is set. + */ + boolean hasExpirationTime(); + /** + *
+     *
+     * Expiration time for this request. After this passes, the server
+     * might not accept the retry descriptor anymore (and the query may
+     * be cancelled). This may be updated on a call to PollFlightInfo.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + * @return The expirationTime. + */ + com.google.protobuf.Timestamp getExpirationTime(); + /** + *
+     *
+     * Expiration time for this request. After this passes, the server
+     * might not accept the retry descriptor anymore (and the query may
+     * be cancelled). This may be updated on a call to PollFlightInfo.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + */ + com.google.protobuf.TimestampOrBuilder getExpirationTimeOrBuilder(); + } + /** + *
+   *
+   * The information to process a long-running query.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.PollInfo} + */ + public static final class PollInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.PollInfo) + PollInfoOrBuilder { + private static final long serialVersionUID = 0L; + // Use PollInfo.newBuilder() to construct. + private PollInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private PollInfo() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new PollInfo(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_PollInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_PollInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.PollInfo.class, org.apache.arrow.flight.impl.Flight.PollInfo.Builder.class); + } + + private int bitField0_; + public static final int INFO_FIELD_NUMBER = 1; + private org.apache.arrow.flight.impl.Flight.FlightInfo info_; + /** + *
+     *
+     * The currently available results.
+     *
+     * If "flight_descriptor" is not specified, the query is complete
+     * and "info" specifies all results. Otherwise, "info" contains
+     * partial query results.
+     *
+     * Note that each PollInfo response contains a complete
+     * FlightInfo (not just the delta between the previous and current
+     * FlightInfo).
+     *
+     * Subsequent PollInfo responses may only append new endpoints to
+     * info.
+     *
+     * Clients can begin fetching results via DoGet(Ticket) with the
+     * ticket in the info before the query is
+     * completed. FlightInfo.ordered is also valid.
+     * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + * @return Whether the info field is set. + */ + @java.lang.Override + public boolean hasInfo() { + return info_ != null; + } + /** + *
+     *
+     * The currently available results.
+     *
+     * If "flight_descriptor" is not specified, the query is complete
+     * and "info" specifies all results. Otherwise, "info" contains
+     * partial query results.
+     *
+     * Note that each PollInfo response contains a complete
+     * FlightInfo (not just the delta between the previous and current
+     * FlightInfo).
+     *
+     * Subsequent PollInfo responses may only append new endpoints to
+     * info.
+     *
+     * Clients can begin fetching results via DoGet(Ticket) with the
+     * ticket in the info before the query is
+     * completed. FlightInfo.ordered is also valid.
+     * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + * @return The info. + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightInfo getInfo() { + return info_ == null ? org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance() : info_; + } + /** + *
+     *
+     * The currently available results.
+     *
+     * If "flight_descriptor" is not specified, the query is complete
+     * and "info" specifies all results. Otherwise, "info" contains
+     * partial query results.
+     *
+     * Note that each PollInfo response contains a complete
+     * FlightInfo (not just the delta between the previous and current
+     * FlightInfo).
+     *
+     * Subsequent PollInfo responses may only append new endpoints to
+     * info.
+     *
+     * Clients can begin fetching results via DoGet(Ticket) with the
+     * ticket in the info before the query is
+     * completed. FlightInfo.ordered is also valid.
+     * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder getInfoOrBuilder() { + return info_ == null ? org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance() : info_; + } + + public static final int FLIGHT_DESCRIPTOR_FIELD_NUMBER = 2; + private org.apache.arrow.flight.impl.Flight.FlightDescriptor flightDescriptor_; + /** + *
+     *
+     * The descriptor the client should use on the next try.
+     * If unset, the query is complete.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return Whether the flightDescriptor field is set. + */ + @java.lang.Override + public boolean hasFlightDescriptor() { + return flightDescriptor_ != null; + } + /** + *
+     *
+     * The descriptor the client should use on the next try.
+     * If unset, the query is complete.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return The flightDescriptor. + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightDescriptor getFlightDescriptor() { + return flightDescriptor_ == null ? org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } + /** + *
+     *
+     * The descriptor the client should use on the next try.
+     * If unset, the query is complete.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder getFlightDescriptorOrBuilder() { + return flightDescriptor_ == null ? org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } + + public static final int PROGRESS_FIELD_NUMBER = 3; + private double progress_ = 0D; + /** + *
+     *
+     * Query progress. If known, must be in [0.0, 1.0] but need not be
+     * monotonic or nondecreasing. If unknown, do not set.
+     * 
+ * + * optional double progress = 3; + * @return Whether the progress field is set. + */ + @java.lang.Override + public boolean hasProgress() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     *
+     * Query progress. If known, must be in [0.0, 1.0] but need not be
+     * monotonic or nondecreasing. If unknown, do not set.
+     * 
+ * + * optional double progress = 3; + * @return The progress. + */ + @java.lang.Override + public double getProgress() { + return progress_; + } + + public static final int EXPIRATION_TIME_FIELD_NUMBER = 4; + private com.google.protobuf.Timestamp expirationTime_; + /** + *
+     *
+     * Expiration time for this request. After this passes, the server
+     * might not accept the retry descriptor anymore (and the query may
+     * be cancelled). This may be updated on a call to PollFlightInfo.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + * @return Whether the expirationTime field is set. + */ + @java.lang.Override + public boolean hasExpirationTime() { + return expirationTime_ != null; + } + /** + *
+     *
+     * Expiration time for this request. After this passes, the server
+     * might not accept the retry descriptor anymore (and the query may
+     * be cancelled). This may be updated on a call to PollFlightInfo.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + * @return The expirationTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getExpirationTime() { + return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; + } + /** + *
+     *
+     * Expiration time for this request. After this passes, the server
+     * might not accept the retry descriptor anymore (and the query may
+     * be cancelled). This may be updated on a call to PollFlightInfo.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getExpirationTimeOrBuilder() { + return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (info_ != null) { + output.writeMessage(1, getInfo()); + } + if (flightDescriptor_ != null) { + output.writeMessage(2, getFlightDescriptor()); + } + if (((bitField0_ & 0x00000001) != 0)) { + output.writeDouble(3, progress_); + } + if (expirationTime_ != null) { + output.writeMessage(4, getExpirationTime()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (info_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getInfo()); + } + if (flightDescriptor_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getFlightDescriptor()); + } + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(3, progress_); + } + if (expirationTime_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getExpirationTime()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.PollInfo)) { + return super.equals(obj); + } + 
org.apache.arrow.flight.impl.Flight.PollInfo other = (org.apache.arrow.flight.impl.Flight.PollInfo) obj; + + if (hasInfo() != other.hasInfo()) return false; + if (hasInfo()) { + if (!getInfo() + .equals(other.getInfo())) return false; + } + if (hasFlightDescriptor() != other.hasFlightDescriptor()) return false; + if (hasFlightDescriptor()) { + if (!getFlightDescriptor() + .equals(other.getFlightDescriptor())) return false; + } + if (hasProgress() != other.hasProgress()) return false; + if (hasProgress()) { + if (java.lang.Double.doubleToLongBits(getProgress()) + != java.lang.Double.doubleToLongBits( + other.getProgress())) return false; + } + if (hasExpirationTime() != other.hasExpirationTime()) return false; + if (hasExpirationTime()) { + if (!getExpirationTime() + .equals(other.getExpirationTime())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasInfo()) { + hash = (37 * hash) + INFO_FIELD_NUMBER; + hash = (53 * hash) + getInfo().hashCode(); + } + if (hasFlightDescriptor()) { + hash = (37 * hash) + FLIGHT_DESCRIPTOR_FIELD_NUMBER; + hash = (53 * hash) + getFlightDescriptor().hashCode(); + } + if (hasProgress()) { + hash = (37 * hash) + PROGRESS_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getProgress())); + } + if (hasExpirationTime()) { + hash = (37 * hash) + EXPIRATION_TIME_FIELD_NUMBER; + hash = (53 * hash) + getExpirationTime().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.PollInfo parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public 
static org.apache.arrow.flight.impl.Flight.PollInfo parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.PollInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.PollInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.PollInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.PollInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.PollInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.PollInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.PollInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + 
public static org.apache.arrow.flight.impl.Flight.PollInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.PollInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.PollInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.PollInfo prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * The information to process a long-running query.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.PollInfo} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.PollInfo) + org.apache.arrow.flight.impl.Flight.PollInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_PollInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_PollInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.PollInfo.class, org.apache.arrow.flight.impl.Flight.PollInfo.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.PollInfo.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + info_ = null; + if (infoBuilder_ != null) { + infoBuilder_.dispose(); + infoBuilder_ = null; + } + flightDescriptor_ = null; + if (flightDescriptorBuilder_ != null) { + flightDescriptorBuilder_.dispose(); + flightDescriptorBuilder_ = null; + } + progress_ = 0D; + expirationTime_ = null; + if (expirationTimeBuilder_ != null) { + expirationTimeBuilder_.dispose(); + expirationTimeBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_PollInfo_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.PollInfo getDefaultInstanceForType() { + return 
org.apache.arrow.flight.impl.Flight.PollInfo.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.PollInfo build() { + org.apache.arrow.flight.impl.Flight.PollInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.PollInfo buildPartial() { + org.apache.arrow.flight.impl.Flight.PollInfo result = new org.apache.arrow.flight.impl.Flight.PollInfo(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.PollInfo result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.info_ = infoBuilder_ == null + ? info_ + : infoBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.flightDescriptor_ = flightDescriptorBuilder_ == null + ? flightDescriptor_ + : flightDescriptorBuilder_.build(); + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000004) != 0)) { + result.progress_ = progress_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.expirationTime_ = expirationTimeBuilder_ == null + ? 
expirationTime_ + : expirationTimeBuilder_.build(); + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.PollInfo) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.PollInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.PollInfo other) { + if (other == org.apache.arrow.flight.impl.Flight.PollInfo.getDefaultInstance()) return this; + if (other.hasInfo()) { + mergeInfo(other.getInfo()); + } + if (other.hasFlightDescriptor()) { + mergeFlightDescriptor(other.getFlightDescriptor()); + } + if (other.hasProgress()) { + setProgress(other.getProgress()); + } + if (other.hasExpirationTime()) { + mergeExpirationTime(other.getExpirationTime()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getInfoFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + input.readMessage( + getFlightDescriptorFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 25: { + progress_ = input.readDouble(); + bitField0_ |= 0x00000004; + break; + } // case 25 + case 34: { + input.readMessage( + getExpirationTimeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000008; + 
break; + } // case 34 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.flight.impl.Flight.FlightInfo info_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightInfo, org.apache.arrow.flight.impl.Flight.FlightInfo.Builder, org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder> infoBuilder_; + /** + *
+       *
+       * The currently available results.
+       *
+       * If "flight_descriptor" is not specified, the query is complete
+       * and "info" specifies all results. Otherwise, "info" contains
+       * partial query results.
+       *
+       * Note that each PollInfo response contains a complete
+       * FlightInfo (not just the delta between the previous and current
+       * FlightInfo).
+       *
+       * Subsequent PollInfo responses may only append new endpoints to
+       * info.
+       *
+       * Clients can begin fetching results via DoGet(Ticket) with the
+       * ticket in the info before the query is
+       * completed. FlightInfo.ordered is also valid.
+       * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + * @return Whether the info field is set. + */ + public boolean hasInfo() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       *
+       * The currently available results.
+       *
+       * If "flight_descriptor" is not specified, the query is complete
+       * and "info" specifies all results. Otherwise, "info" contains
+       * partial query results.
+       *
+       * Note that each PollInfo response contains a complete
+       * FlightInfo (not just the delta between the previous and current
+       * FlightInfo).
+       *
+       * Subsequent PollInfo responses may only append new endpoints to
+       * info.
+       *
+       * Clients can begin fetching results via DoGet(Ticket) with the
+       * ticket in the info before the query is
+       * completed. FlightInfo.ordered is also valid.
+       * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + * @return The info. + */ + public org.apache.arrow.flight.impl.Flight.FlightInfo getInfo() { + if (infoBuilder_ == null) { + return info_ == null ? org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance() : info_; + } else { + return infoBuilder_.getMessage(); + } + } + /** + *
+       *
+       * The currently available results.
+       *
+       * If "flight_descriptor" is not specified, the query is complete
+       * and "info" specifies all results. Otherwise, "info" contains
+       * partial query results.
+       *
+       * Note that each PollInfo response contains a complete
+       * FlightInfo (not just the delta between the previous and current
+       * FlightInfo).
+       *
+       * Subsequent PollInfo responses may only append new endpoints to
+       * info.
+       *
+       * Clients can begin fetching results via DoGet(Ticket) with the
+       * ticket in the info before the query is
+       * completed. FlightInfo.ordered is also valid.
+       * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public Builder setInfo(org.apache.arrow.flight.impl.Flight.FlightInfo value) { + if (infoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + info_ = value; + } else { + infoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * The currently available results.
+       *
+       * If "flight_descriptor" is not specified, the query is complete
+       * and "info" specifies all results. Otherwise, "info" contains
+       * partial query results.
+       *
+       * Note that each PollInfo response contains a complete
+       * FlightInfo (not just the delta between the previous and current
+       * FlightInfo).
+       *
+       * Subsequent PollInfo responses may only append new endpoints to
+       * info.
+       *
+       * Clients can begin fetching results via DoGet(Ticket) with the
+       * ticket in the info before the query is
+       * completed. FlightInfo.ordered is also valid.
+       * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public Builder setInfo( + org.apache.arrow.flight.impl.Flight.FlightInfo.Builder builderForValue) { + if (infoBuilder_ == null) { + info_ = builderForValue.build(); + } else { + infoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * The currently available results.
+       *
+       * If "flight_descriptor" is not specified, the query is complete
+       * and "info" specifies all results. Otherwise, "info" contains
+       * partial query results.
+       *
+       * Note that each PollInfo response contains a complete
+       * FlightInfo (not just the delta between the previous and current
+       * FlightInfo).
+       *
+       * Subsequent PollInfo responses may only append new endpoints to
+       * info.
+       *
+       * Clients can begin fetching results via DoGet(Ticket) with the
+       * ticket in the info before the query is
+       * completed. FlightInfo.ordered is also valid.
+       * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public Builder mergeInfo(org.apache.arrow.flight.impl.Flight.FlightInfo value) { + if (infoBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + info_ != null && + info_ != org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance()) { + getInfoBuilder().mergeFrom(value); + } else { + info_ = value; + } + } else { + infoBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * The currently available results.
+       *
+       * If "flight_descriptor" is not specified, the query is complete
+       * and "info" specifies all results. Otherwise, "info" contains
+       * partial query results.
+       *
+       * Note that each PollInfo response contains a complete
+       * FlightInfo (not just the delta between the previous and current
+       * FlightInfo).
+       *
+       * Subsequent PollInfo responses may only append new endpoints to
+       * info.
+       *
+       * Clients can begin fetching results via DoGet(Ticket) with the
+       * ticket in the info before the query is
+       * completed. FlightInfo.ordered is also valid.
+       * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public Builder clearInfo() { + bitField0_ = (bitField0_ & ~0x00000001); + info_ = null; + if (infoBuilder_ != null) { + infoBuilder_.dispose(); + infoBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       *
+       * The currently available results.
+       *
+       * If "flight_descriptor" is not specified, the query is complete
+       * and "info" specifies all results. Otherwise, "info" contains
+       * partial query results.
+       *
+       * Note that each PollInfo response contains a complete
+       * FlightInfo (not just the delta between the previous and current
+       * FlightInfo).
+       *
+       * Subsequent PollInfo responses may only append new endpoints to
+       * info.
+       *
+       * Clients can begin fetching results via DoGet(Ticket) with the
+       * ticket in the info before the query is
+       * completed. FlightInfo.ordered is also valid.
+       * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public org.apache.arrow.flight.impl.Flight.FlightInfo.Builder getInfoBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getInfoFieldBuilder().getBuilder(); + } + /** + *
+       *
+       * The currently available results.
+       *
+       * If "flight_descriptor" is not specified, the query is complete
+       * and "info" specifies all results. Otherwise, "info" contains
+       * partial query results.
+       *
+       * Note that each PollInfo response contains a complete
+       * FlightInfo (not just the delta between the previous and current
+       * FlightInfo).
+       *
+       * Subsequent PollInfo responses may only append new endpoints to
+       * info.
+       *
+       * Clients can begin fetching results via DoGet(Ticket) with the
+       * ticket in the info before the query is
+       * completed. FlightInfo.ordered is also valid.
+       * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + */ + public org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder getInfoOrBuilder() { + if (infoBuilder_ != null) { + return infoBuilder_.getMessageOrBuilder(); + } else { + return info_ == null ? + org.apache.arrow.flight.impl.Flight.FlightInfo.getDefaultInstance() : info_; + } + } + /** + *
+       *
+       * The currently available results.
+       *
+       * If "flight_descriptor" is not specified, the query is complete
+       * and "info" specifies all results. Otherwise, "info" contains
+       * partial query results.
+       *
+       * Note that each PollInfo response contains a complete
+       * FlightInfo (not just the delta between the previous and current
+       * FlightInfo).
+       *
+       * Subsequent PollInfo responses may only append new endpoints to
+       * info.
+       *
+       * Clients can begin fetching results via DoGet(Ticket) with the
+       * ticket in the info before the query is
+       * completed. FlightInfo.ordered is also valid.
+       * 
+ * + * .arrow.flight.protocol.FlightInfo info = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightInfo, org.apache.arrow.flight.impl.Flight.FlightInfo.Builder, org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder> + getInfoFieldBuilder() { + if (infoBuilder_ == null) { + infoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightInfo, org.apache.arrow.flight.impl.Flight.FlightInfo.Builder, org.apache.arrow.flight.impl.Flight.FlightInfoOrBuilder>( + getInfo(), + getParentForChildren(), + isClean()); + info_ = null; + } + return infoBuilder_; + } + + private org.apache.arrow.flight.impl.Flight.FlightDescriptor flightDescriptor_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder, org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder> flightDescriptorBuilder_; + /** + *
+       *
+       * The descriptor the client should use on the next try.
+       * If unset, the query is complete.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return Whether the flightDescriptor field is set. + */ + public boolean hasFlightDescriptor() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       *
+       * The descriptor the client should use on the next try.
+       * If unset, the query is complete.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + * @return The flightDescriptor. + */ + public org.apache.arrow.flight.impl.Flight.FlightDescriptor getFlightDescriptor() { + if (flightDescriptorBuilder_ == null) { + return flightDescriptor_ == null ? org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } else { + return flightDescriptorBuilder_.getMessage(); + } + } + /** + *
+       *
+       * The descriptor the client should use on the next try.
+       * If unset, the query is complete.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public Builder setFlightDescriptor(org.apache.arrow.flight.impl.Flight.FlightDescriptor value) { + if (flightDescriptorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + flightDescriptor_ = value; + } else { + flightDescriptorBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor the client should use on the next try.
+       * If unset, the query is complete.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public Builder setFlightDescriptor( + org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder builderForValue) { + if (flightDescriptorBuilder_ == null) { + flightDescriptor_ = builderForValue.build(); + } else { + flightDescriptorBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor the client should use on the next try.
+       * If unset, the query is complete.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public Builder mergeFlightDescriptor(org.apache.arrow.flight.impl.Flight.FlightDescriptor value) { + if (flightDescriptorBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) && + flightDescriptor_ != null && + flightDescriptor_ != org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance()) { + getFlightDescriptorBuilder().mergeFrom(value); + } else { + flightDescriptor_ = value; + } + } else { + flightDescriptorBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor the client should use on the next try.
+       * If unset, the query is complete.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public Builder clearFlightDescriptor() { + bitField0_ = (bitField0_ & ~0x00000002); + flightDescriptor_ = null; + if (flightDescriptorBuilder_ != null) { + flightDescriptorBuilder_.dispose(); + flightDescriptorBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor the client should use on the next try.
+       * If unset, the query is complete.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder getFlightDescriptorBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getFlightDescriptorFieldBuilder().getBuilder(); + } + /** + *
+       *
+       * The descriptor the client should use on the next try.
+       * If unset, the query is complete.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + public org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder getFlightDescriptorOrBuilder() { + if (flightDescriptorBuilder_ != null) { + return flightDescriptorBuilder_.getMessageOrBuilder(); + } else { + return flightDescriptor_ == null ? + org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } + } + /** + *
+       *
+       * The descriptor the client should use on the next try.
+       * If unset, the query is complete.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder, org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder> + getFlightDescriptorFieldBuilder() { + if (flightDescriptorBuilder_ == null) { + flightDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder, org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder>( + getFlightDescriptor(), + getParentForChildren(), + isClean()); + flightDescriptor_ = null; + } + return flightDescriptorBuilder_; + } + + private double progress_ ; + /** + *
+       *
+       * Query progress. If known, must be in [0.0, 1.0] but need not be
+       * monotonic or nondecreasing. If unknown, do not set.
+       * 
+ * + * optional double progress = 3; + * @return Whether the progress field is set. + */ + @java.lang.Override + public boolean hasProgress() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + *
+       *
+       * Query progress. If known, must be in [0.0, 1.0] but need not be
+       * monotonic or nondecreasing. If unknown, do not set.
+       * 
+ * + * optional double progress = 3; + * @return The progress. + */ + @java.lang.Override + public double getProgress() { + return progress_; + } + /** + *
+       *
+       * Query progress. If known, must be in [0.0, 1.0] but need not be
+       * monotonic or nondecreasing. If unknown, do not set.
+       * 
+ * + * optional double progress = 3; + * @param value The progress to set. + * @return This builder for chaining. + */ + public Builder setProgress(double value) { + + progress_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       *
+       * Query progress. If known, must be in [0.0, 1.0] but need not be
+       * monotonic or nondecreasing. If unknown, do not set.
+       * 
+ * + * optional double progress = 3; + * @return This builder for chaining. + */ + public Builder clearProgress() { + bitField0_ = (bitField0_ & ~0x00000004); + progress_ = 0D; + onChanged(); + return this; + } + + private com.google.protobuf.Timestamp expirationTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> expirationTimeBuilder_; + /** + *
+       *
+       * Expiration time for this request. After this passes, the server
+       * might not accept the retry descriptor anymore (and the query may
+       * be cancelled). This may be updated on a call to PollFlightInfo.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + * @return Whether the expirationTime field is set. + */ + public boolean hasExpirationTime() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + *
+       *
+       * Expiration time for this request. After this passes, the server
+       * might not accept the retry descriptor anymore (and the query may
+       * be cancelled). This may be updated on a call to PollFlightInfo.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + * @return The expirationTime. + */ + public com.google.protobuf.Timestamp getExpirationTime() { + if (expirationTimeBuilder_ == null) { + return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; + } else { + return expirationTimeBuilder_.getMessage(); + } + } + /** + *
+       *
+       * Expiration time for this request. After this passes, the server
+       * might not accept the retry descriptor anymore (and the query may
+       * be cancelled). This may be updated on a call to PollFlightInfo.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + */ + public Builder setExpirationTime(com.google.protobuf.Timestamp value) { + if (expirationTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + expirationTime_ = value; + } else { + expirationTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       *
+       * Expiration time for this request. After this passes, the server
+       * might not accept the retry descriptor anymore (and the query may
+       * be cancelled). This may be updated on a call to PollFlightInfo.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + */ + public Builder setExpirationTime( + com.google.protobuf.Timestamp.Builder builderForValue) { + if (expirationTimeBuilder_ == null) { + expirationTime_ = builderForValue.build(); + } else { + expirationTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       *
+       * Expiration time for this request. After this passes, the server
+       * might not accept the retry descriptor anymore (and the query may
+       * be cancelled). This may be updated on a call to PollFlightInfo.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + */ + public Builder mergeExpirationTime(com.google.protobuf.Timestamp value) { + if (expirationTimeBuilder_ == null) { + if (((bitField0_ & 0x00000008) != 0) && + expirationTime_ != null && + expirationTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getExpirationTimeBuilder().mergeFrom(value); + } else { + expirationTime_ = value; + } + } else { + expirationTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       *
+       * Expiration time for this request. After this passes, the server
+       * might not accept the retry descriptor anymore (and the query may
+       * be cancelled). This may be updated on a call to PollFlightInfo.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + */ + public Builder clearExpirationTime() { + bitField0_ = (bitField0_ & ~0x00000008); + expirationTime_ = null; + if (expirationTimeBuilder_ != null) { + expirationTimeBuilder_.dispose(); + expirationTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       *
+       * Expiration time for this request. After this passes, the server
+       * might not accept the retry descriptor anymore (and the query may
+       * be cancelled). This may be updated on a call to PollFlightInfo.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + */ + public com.google.protobuf.Timestamp.Builder getExpirationTimeBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getExpirationTimeFieldBuilder().getBuilder(); + } + /** + *
+       *
+       * Expiration time for this request. After this passes, the server
+       * might not accept the retry descriptor anymore (and the query may
+       * be cancelled). This may be updated on a call to PollFlightInfo.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + */ + public com.google.protobuf.TimestampOrBuilder getExpirationTimeOrBuilder() { + if (expirationTimeBuilder_ != null) { + return expirationTimeBuilder_.getMessageOrBuilder(); + } else { + return expirationTime_ == null ? + com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; + } + } + /** + *
+       *
+       * Expiration time for this request. After this passes, the server
+       * might not accept the retry descriptor anymore (and the query may
+       * be cancelled). This may be updated on a call to PollFlightInfo.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> + getExpirationTimeFieldBuilder() { + if (expirationTimeBuilder_ == null) { + expirationTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( + getExpirationTime(), + getParentForChildren(), + isClean()); + expirationTime_ = null; + } + return expirationTimeBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.PollInfo) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.PollInfo) + private static final org.apache.arrow.flight.impl.Flight.PollInfo DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.PollInfo(); + } + + public static org.apache.arrow.flight.impl.Flight.PollInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public PollInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch 
(com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.PollInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface FlightEndpointOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.FlightEndpoint) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * Token used to retrieve this stream.
+     * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + * @return Whether the ticket field is set. + */ + boolean hasTicket(); + /** + *
+     *
+     * Token used to retrieve this stream.
+     * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + * @return The ticket. + */ + org.apache.arrow.flight.impl.Flight.Ticket getTicket(); + /** + *
+     *
+     * Token used to retrieve this stream.
+     * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + */ + org.apache.arrow.flight.impl.Flight.TicketOrBuilder getTicketOrBuilder(); + + /** + *
+     *
+     * A list of URIs where this ticket can be redeemed via DoGet().
+     *
+     * If the list is empty, the expectation is that the ticket can only
+     * be redeemed on the current service where the ticket was
+     * generated.
+     *
+     * If the list is not empty, the expectation is that the ticket can be
+     * redeemed at any of the locations, and that the data returned will be
+     * equivalent. In this case, the ticket may only be redeemed at one of the
+     * given locations, and not (necessarily) on the current service. If one
+     * of the given locations is "arrow-flight-reuse-connection://?", the
+     * client may redeem the ticket on the service where the ticket was
+     * generated (i.e., the same as above), in addition to the other
+     * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+     * or 'scheme://' are not accepted by Java's java.net.URI.)
+     *
+     * In other words, an application can use multiple locations to
+     * represent redundant and/or load balanced services.
+     * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + java.util.List + getLocationList(); + /** + *
+     *
+     * A list of URIs where this ticket can be redeemed via DoGet().
+     *
+     * If the list is empty, the expectation is that the ticket can only
+     * be redeemed on the current service where the ticket was
+     * generated.
+     *
+     * If the list is not empty, the expectation is that the ticket can be
+     * redeemed at any of the locations, and that the data returned will be
+     * equivalent. In this case, the ticket may only be redeemed at one of the
+     * given locations, and not (necessarily) on the current service. If one
+     * of the given locations is "arrow-flight-reuse-connection://?", the
+     * client may redeem the ticket on the service where the ticket was
+     * generated (i.e., the same as above), in addition to the other
+     * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+     * or 'scheme://' are not accepted by Java's java.net.URI.)
+     *
+     * In other words, an application can use multiple locations to
+     * represent redundant and/or load balanced services.
+     * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + org.apache.arrow.flight.impl.Flight.Location getLocation(int index); + /** + *
+     *
+     * A list of URIs where this ticket can be redeemed via DoGet().
+     *
+     * If the list is empty, the expectation is that the ticket can only
+     * be redeemed on the current service where the ticket was
+     * generated.
+     *
+     * If the list is not empty, the expectation is that the ticket can be
+     * redeemed at any of the locations, and that the data returned will be
+     * equivalent. In this case, the ticket may only be redeemed at one of the
+     * given locations, and not (necessarily) on the current service. If one
+     * of the given locations is "arrow-flight-reuse-connection://?", the
+     * client may redeem the ticket on the service where the ticket was
+     * generated (i.e., the same as above), in addition to the other
+     * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+     * or 'scheme://' are not accepted by Java's java.net.URI.)
+     *
+     * In other words, an application can use multiple locations to
+     * represent redundant and/or load balanced services.
+     * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + int getLocationCount(); + /** + *
+     *
+     * A list of URIs where this ticket can be redeemed via DoGet().
+     *
+     * If the list is empty, the expectation is that the ticket can only
+     * be redeemed on the current service where the ticket was
+     * generated.
+     *
+     * If the list is not empty, the expectation is that the ticket can be
+     * redeemed at any of the locations, and that the data returned will be
+     * equivalent. In this case, the ticket may only be redeemed at one of the
+     * given locations, and not (necessarily) on the current service. If one
+     * of the given locations is "arrow-flight-reuse-connection://?", the
+     * client may redeem the ticket on the service where the ticket was
+     * generated (i.e., the same as above), in addition to the other
+     * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+     * or 'scheme://' are not accepted by Java's java.net.URI.)
+     *
+     * In other words, an application can use multiple locations to
+     * represent redundant and/or load balanced services.
+     * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + java.util.List + getLocationOrBuilderList(); + /** + *
+     *
+     * A list of URIs where this ticket can be redeemed via DoGet().
+     *
+     * If the list is empty, the expectation is that the ticket can only
+     * be redeemed on the current service where the ticket was
+     * generated.
+     *
+     * If the list is not empty, the expectation is that the ticket can be
+     * redeemed at any of the locations, and that the data returned will be
+     * equivalent. In this case, the ticket may only be redeemed at one of the
+     * given locations, and not (necessarily) on the current service. If one
+     * of the given locations is "arrow-flight-reuse-connection://?", the
+     * client may redeem the ticket on the service where the ticket was
+     * generated (i.e., the same as above), in addition to the other
+     * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+     * or 'scheme://' are not accepted by Java's java.net.URI.)
+     *
+     * In other words, an application can use multiple locations to
+     * represent redundant and/or load balanced services.
+     * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + org.apache.arrow.flight.impl.Flight.LocationOrBuilder getLocationOrBuilder( + int index); + + /** + *
+     *
+     * Expiration time of this stream. If present, clients may assume
+     * they can retry DoGet requests. Otherwise, it is
+     * application-defined whether DoGet requests may be retried.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + * @return Whether the expirationTime field is set. + */ + boolean hasExpirationTime(); + /** + *
+     *
+     * Expiration time of this stream. If present, clients may assume
+     * they can retry DoGet requests. Otherwise, it is
+     * application-defined whether DoGet requests may be retried.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + * @return The expirationTime. + */ + com.google.protobuf.Timestamp getExpirationTime(); + /** + *
+     *
+     * Expiration time of this stream. If present, clients may assume
+     * they can retry DoGet requests. Otherwise, it is
+     * application-defined whether DoGet requests may be retried.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + */ + com.google.protobuf.TimestampOrBuilder getExpirationTimeOrBuilder(); + + /** + *
+     *
+     * Application-defined metadata.
+     *
+     * There is no inherent or required relationship between this
+     * and the app_metadata fields in the FlightInfo or resulting
+     * FlightData messages. Since this metadata is application-defined,
+     * a given application could define there to be a relationship,
+     * but there is none required by the spec.
+     * 
+ * + * bytes app_metadata = 4; + * @return The appMetadata. + */ + com.google.protobuf.ByteString getAppMetadata(); + } + /** + *
+   *
+   * A particular stream or split associated with a flight.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.FlightEndpoint} + */ + public static final class FlightEndpoint extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.FlightEndpoint) + FlightEndpointOrBuilder { + private static final long serialVersionUID = 0L; + // Use FlightEndpoint.newBuilder() to construct. + private FlightEndpoint(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private FlightEndpoint() { + location_ = java.util.Collections.emptyList(); + appMetadata_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new FlightEndpoint(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightEndpoint_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightEndpoint_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.FlightEndpoint.class, org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder.class); + } + + public static final int TICKET_FIELD_NUMBER = 1; + private org.apache.arrow.flight.impl.Flight.Ticket ticket_; + /** + *
+     *
+     * Token used to retrieve this stream.
+     * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + * @return Whether the ticket field is set. + */ + @java.lang.Override + public boolean hasTicket() { + return ticket_ != null; + } + /** + *
+     *
+     * Token used to retrieve this stream.
+     * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + * @return The ticket. + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Ticket getTicket() { + return ticket_ == null ? org.apache.arrow.flight.impl.Flight.Ticket.getDefaultInstance() : ticket_; + } + /** + *
+     *
+     * Token used to retrieve this stream.
+     * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.TicketOrBuilder getTicketOrBuilder() { + return ticket_ == null ? org.apache.arrow.flight.impl.Flight.Ticket.getDefaultInstance() : ticket_; + } + + public static final int LOCATION_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private java.util.List location_; + /** + *
+     *
+     * A list of URIs where this ticket can be redeemed via DoGet().
+     *
+     * If the list is empty, the expectation is that the ticket can only
+     * be redeemed on the current service where the ticket was
+     * generated.
+     *
+     * If the list is not empty, the expectation is that the ticket can be
+     * redeemed at any of the locations, and that the data returned will be
+     * equivalent. In this case, the ticket may only be redeemed at one of the
+     * given locations, and not (necessarily) on the current service. If one
+     * of the given locations is "arrow-flight-reuse-connection://?", the
+     * client may redeem the ticket on the service where the ticket was
+     * generated (i.e., the same as above), in addition to the other
+     * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+     * or 'scheme://' are not accepted by Java's java.net.URI.)
+     *
+     * In other words, an application can use multiple locations to
+     * represent redundant and/or load balanced services.
+     * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + @java.lang.Override + public java.util.List getLocationList() { + return location_; + } + /** + *
+     *
+     * A list of URIs where this ticket can be redeemed via DoGet().
+     *
+     * If the list is empty, the expectation is that the ticket can only
+     * be redeemed on the current service where the ticket was
+     * generated.
+     *
+     * If the list is not empty, the expectation is that the ticket can be
+     * redeemed at any of the locations, and that the data returned will be
+     * equivalent. In this case, the ticket may only be redeemed at one of the
+     * given locations, and not (necessarily) on the current service. If one
+     * of the given locations is "arrow-flight-reuse-connection://?", the
+     * client may redeem the ticket on the service where the ticket was
+     * generated (i.e., the same as above), in addition to the other
+     * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+     * or 'scheme://' are not accepted by Java's java.net.URI.)
+     *
+     * In other words, an application can use multiple locations to
+     * represent redundant and/or load balanced services.
+     * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + @java.lang.Override + public java.util.List + getLocationOrBuilderList() { + return location_; + } + /** + *
+     *
+     * A list of URIs where this ticket can be redeemed via DoGet().
+     *
+     * If the list is empty, the expectation is that the ticket can only
+     * be redeemed on the current service where the ticket was
+     * generated.
+     *
+     * If the list is not empty, the expectation is that the ticket can be
+     * redeemed at any of the locations, and that the data returned will be
+     * equivalent. In this case, the ticket may only be redeemed at one of the
+     * given locations, and not (necessarily) on the current service. If one
+     * of the given locations is "arrow-flight-reuse-connection://?", the
+     * client may redeem the ticket on the service where the ticket was
+     * generated (i.e., the same as above), in addition to the other
+     * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+     * or 'scheme://' are not accepted by Java's java.net.URI.)
+     *
+     * In other words, an application can use multiple locations to
+     * represent redundant and/or load balanced services.
+     * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + @java.lang.Override + public int getLocationCount() { + return location_.size(); + } + /** + *
+     *
+     * A list of URIs where this ticket can be redeemed via DoGet().
+     *
+     * If the list is empty, the expectation is that the ticket can only
+     * be redeemed on the current service where the ticket was
+     * generated.
+     *
+     * If the list is not empty, the expectation is that the ticket can be
+     * redeemed at any of the locations, and that the data returned will be
+     * equivalent. In this case, the ticket may only be redeemed at one of the
+     * given locations, and not (necessarily) on the current service. If one
+     * of the given locations is "arrow-flight-reuse-connection://?", the
+     * client may redeem the ticket on the service where the ticket was
+     * generated (i.e., the same as above), in addition to the other
+     * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+     * or 'scheme://' are not accepted by Java's java.net.URI.)
+     *
+     * In other words, an application can use multiple locations to
+     * represent redundant and/or load balanced services.
+     * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Location getLocation(int index) { + return location_.get(index); + } + /** + *
+     *
+     * A list of URIs where this ticket can be redeemed via DoGet().
+     *
+     * If the list is empty, the expectation is that the ticket can only
+     * be redeemed on the current service where the ticket was
+     * generated.
+     *
+     * If the list is not empty, the expectation is that the ticket can be
+     * redeemed at any of the locations, and that the data returned will be
+     * equivalent. In this case, the ticket may only be redeemed at one of the
+     * given locations, and not (necessarily) on the current service. If one
+     * of the given locations is "arrow-flight-reuse-connection://?", the
+     * client may redeem the ticket on the service where the ticket was
+     * generated (i.e., the same as above), in addition to the other
+     * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+     * or 'scheme://' are not accepted by Java's java.net.URI.)
+     *
+     * In other words, an application can use multiple locations to
+     * represent redundant and/or load balanced services.
+     * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.LocationOrBuilder getLocationOrBuilder( + int index) { + return location_.get(index); + } + + public static final int EXPIRATION_TIME_FIELD_NUMBER = 3; + private com.google.protobuf.Timestamp expirationTime_; + /** + *
+     *
+     * Expiration time of this stream. If present, clients may assume
+     * they can retry DoGet requests. Otherwise, it is
+     * application-defined whether DoGet requests may be retried.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + * @return Whether the expirationTime field is set. + */ + @java.lang.Override + public boolean hasExpirationTime() { + return expirationTime_ != null; + } + /** + *
+     *
+     * Expiration time of this stream. If present, clients may assume
+     * they can retry DoGet requests. Otherwise, it is
+     * application-defined whether DoGet requests may be retried.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + * @return The expirationTime. + */ + @java.lang.Override + public com.google.protobuf.Timestamp getExpirationTime() { + return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; + } + /** + *
+     *
+     * Expiration time of this stream. If present, clients may assume
+     * they can retry DoGet requests. Otherwise, it is
+     * application-defined whether DoGet requests may be retried.
+     * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + */ + @java.lang.Override + public com.google.protobuf.TimestampOrBuilder getExpirationTimeOrBuilder() { + return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; + } + + public static final int APP_METADATA_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString appMetadata_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     *
+     * Application-defined metadata.
+     *
+     * There is no inherent or required relationship between this
+     * and the app_metadata fields in the FlightInfo or resulting
+     * FlightData messages. Since this metadata is application-defined,
+     * a given application could define there to be a relationship,
+     * but there is none required by the spec.
+     * 
+ * + * bytes app_metadata = 4; + * @return The appMetadata. + */ + @java.lang.Override + public com.google.protobuf.ByteString getAppMetadata() { + return appMetadata_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (ticket_ != null) { + output.writeMessage(1, getTicket()); + } + for (int i = 0; i < location_.size(); i++) { + output.writeMessage(2, location_.get(i)); + } + if (expirationTime_ != null) { + output.writeMessage(3, getExpirationTime()); + } + if (!appMetadata_.isEmpty()) { + output.writeBytes(4, appMetadata_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (ticket_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getTicket()); + } + for (int i = 0; i < location_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, location_.get(i)); + } + if (expirationTime_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getExpirationTime()); + } + if (!appMetadata_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, appMetadata_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.FlightEndpoint)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.FlightEndpoint other = 
(org.apache.arrow.flight.impl.Flight.FlightEndpoint) obj; + + if (hasTicket() != other.hasTicket()) return false; + if (hasTicket()) { + if (!getTicket() + .equals(other.getTicket())) return false; + } + if (!getLocationList() + .equals(other.getLocationList())) return false; + if (hasExpirationTime() != other.hasExpirationTime()) return false; + if (hasExpirationTime()) { + if (!getExpirationTime() + .equals(other.getExpirationTime())) return false; + } + if (!getAppMetadata() + .equals(other.getAppMetadata())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasTicket()) { + hash = (37 * hash) + TICKET_FIELD_NUMBER; + hash = (53 * hash) + getTicket().hashCode(); + } + if (getLocationCount() > 0) { + hash = (37 * hash) + LOCATION_FIELD_NUMBER; + hash = (53 * hash) + getLocationList().hashCode(); + } + if (hasExpirationTime()) { + hash = (37 * hash) + EXPIRATION_TIME_FIELD_NUMBER; + hash = (53 * hash) + getExpirationTime().hashCode(); + } + hash = (37 * hash) + APP_METADATA_FIELD_NUMBER; + hash = (53 * hash) + getAppMetadata().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, 
extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.FlightEndpoint prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * A particular stream or split associated with a flight.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.FlightEndpoint} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.FlightEndpoint) + org.apache.arrow.flight.impl.Flight.FlightEndpointOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightEndpoint_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightEndpoint_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.FlightEndpoint.class, org.apache.arrow.flight.impl.Flight.FlightEndpoint.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.FlightEndpoint.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + ticket_ = null; + if (ticketBuilder_ != null) { + ticketBuilder_.dispose(); + ticketBuilder_ = null; + } + if (locationBuilder_ == null) { + location_ = java.util.Collections.emptyList(); + } else { + location_ = null; + locationBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + expirationTime_ = null; + if (expirationTimeBuilder_ != null) { + expirationTimeBuilder_.dispose(); + expirationTimeBuilder_ = null; + } + appMetadata_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightEndpoint_descriptor; + } + + @java.lang.Override + 
public org.apache.arrow.flight.impl.Flight.FlightEndpoint getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.FlightEndpoint.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightEndpoint build() { + org.apache.arrow.flight.impl.Flight.FlightEndpoint result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightEndpoint buildPartial() { + org.apache.arrow.flight.impl.Flight.FlightEndpoint result = new org.apache.arrow.flight.impl.Flight.FlightEndpoint(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.flight.impl.Flight.FlightEndpoint result) { + if (locationBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0)) { + location_ = java.util.Collections.unmodifiableList(location_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.location_ = location_; + } else { + result.location_ = locationBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.FlightEndpoint result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.ticket_ = ticketBuilder_ == null + ? ticket_ + : ticketBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.expirationTime_ = expirationTimeBuilder_ == null + ? 
expirationTime_ + : expirationTimeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.appMetadata_ = appMetadata_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.FlightEndpoint) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.FlightEndpoint)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.FlightEndpoint other) { + if (other == org.apache.arrow.flight.impl.Flight.FlightEndpoint.getDefaultInstance()) return this; + if (other.hasTicket()) { + mergeTicket(other.getTicket()); + } + if (locationBuilder_ == null) { + if (!other.location_.isEmpty()) { + if (location_.isEmpty()) { + location_ = other.location_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureLocationIsMutable(); + location_.addAll(other.location_); + } + onChanged(); + } + } else { + if (!other.location_.isEmpty()) { + if (locationBuilder_.isEmpty()) { + locationBuilder_.dispose(); + locationBuilder_ = null; + location_ = other.location_; + bitField0_ = (bitField0_ & ~0x00000002); + locationBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getLocationFieldBuilder() : null; + } else { + locationBuilder_.addAllMessages(other.location_); + } + } + } + if (other.hasExpirationTime()) { + mergeExpirationTime(other.getExpirationTime()); + } + if (other.getAppMetadata() != com.google.protobuf.ByteString.EMPTY) { + setAppMetadata(other.getAppMetadata()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getTicketFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + org.apache.arrow.flight.impl.Flight.Location m = + input.readMessage( + org.apache.arrow.flight.impl.Flight.Location.parser(), + extensionRegistry); + if (locationBuilder_ == null) { + ensureLocationIsMutable(); + location_.add(m); + } else { + locationBuilder_.addMessage(m); + } + break; + } // case 18 + case 26: { + input.readMessage( + getExpirationTimeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: { + appMetadata_ = input.readBytes(); + bitField0_ |= 0x00000008; + break; + } // case 34 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + 
private org.apache.arrow.flight.impl.Flight.Ticket ticket_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.Ticket, org.apache.arrow.flight.impl.Flight.Ticket.Builder, org.apache.arrow.flight.impl.Flight.TicketOrBuilder> ticketBuilder_; + /** + *
+       *
+       * Token used to retrieve this stream.
+       * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + * @return Whether the ticket field is set. + */ + public boolean hasTicket() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       *
+       * Token used to retrieve this stream.
+       * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + * @return The ticket. + */ + public org.apache.arrow.flight.impl.Flight.Ticket getTicket() { + if (ticketBuilder_ == null) { + return ticket_ == null ? org.apache.arrow.flight.impl.Flight.Ticket.getDefaultInstance() : ticket_; + } else { + return ticketBuilder_.getMessage(); + } + } + /** + *
+       *
+       * Token used to retrieve this stream.
+       * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + */ + public Builder setTicket(org.apache.arrow.flight.impl.Flight.Ticket value) { + if (ticketBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ticket_ = value; + } else { + ticketBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * Token used to retrieve this stream.
+       * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + */ + public Builder setTicket( + org.apache.arrow.flight.impl.Flight.Ticket.Builder builderForValue) { + if (ticketBuilder_ == null) { + ticket_ = builderForValue.build(); + } else { + ticketBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * Token used to retrieve this stream.
+       * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + */ + public Builder mergeTicket(org.apache.arrow.flight.impl.Flight.Ticket value) { + if (ticketBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + ticket_ != null && + ticket_ != org.apache.arrow.flight.impl.Flight.Ticket.getDefaultInstance()) { + getTicketBuilder().mergeFrom(value); + } else { + ticket_ = value; + } + } else { + ticketBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * Token used to retrieve this stream.
+       * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + */ + public Builder clearTicket() { + bitField0_ = (bitField0_ & ~0x00000001); + ticket_ = null; + if (ticketBuilder_ != null) { + ticketBuilder_.dispose(); + ticketBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       *
+       * Token used to retrieve this stream.
+       * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + */ + public org.apache.arrow.flight.impl.Flight.Ticket.Builder getTicketBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getTicketFieldBuilder().getBuilder(); + } + /** + *
+       *
+       * Token used to retrieve this stream.
+       * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + */ + public org.apache.arrow.flight.impl.Flight.TicketOrBuilder getTicketOrBuilder() { + if (ticketBuilder_ != null) { + return ticketBuilder_.getMessageOrBuilder(); + } else { + return ticket_ == null ? + org.apache.arrow.flight.impl.Flight.Ticket.getDefaultInstance() : ticket_; + } + } + /** + *
+       *
+       * Token used to retrieve this stream.
+       * 
+ * + * .arrow.flight.protocol.Ticket ticket = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.Ticket, org.apache.arrow.flight.impl.Flight.Ticket.Builder, org.apache.arrow.flight.impl.Flight.TicketOrBuilder> + getTicketFieldBuilder() { + if (ticketBuilder_ == null) { + ticketBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.Ticket, org.apache.arrow.flight.impl.Flight.Ticket.Builder, org.apache.arrow.flight.impl.Flight.TicketOrBuilder>( + getTicket(), + getParentForChildren(), + isClean()); + ticket_ = null; + } + return ticketBuilder_; + } + + private java.util.List location_ = + java.util.Collections.emptyList(); + private void ensureLocationIsMutable() { + if (!((bitField0_ & 0x00000002) != 0)) { + location_ = new java.util.ArrayList(location_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.Location, org.apache.arrow.flight.impl.Flight.Location.Builder, org.apache.arrow.flight.impl.Flight.LocationOrBuilder> locationBuilder_; + + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public java.util.List getLocationList() { + if (locationBuilder_ == null) { + return java.util.Collections.unmodifiableList(location_); + } else { + return locationBuilder_.getMessageList(); + } + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public int getLocationCount() { + if (locationBuilder_ == null) { + return location_.size(); + } else { + return locationBuilder_.getCount(); + } + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public org.apache.arrow.flight.impl.Flight.Location getLocation(int index) { + if (locationBuilder_ == null) { + return location_.get(index); + } else { + return locationBuilder_.getMessage(index); + } + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public Builder setLocation( + int index, org.apache.arrow.flight.impl.Flight.Location value) { + if (locationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLocationIsMutable(); + location_.set(index, value); + onChanged(); + } else { + locationBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public Builder setLocation( + int index, org.apache.arrow.flight.impl.Flight.Location.Builder builderForValue) { + if (locationBuilder_ == null) { + ensureLocationIsMutable(); + location_.set(index, builderForValue.build()); + onChanged(); + } else { + locationBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public Builder addLocation(org.apache.arrow.flight.impl.Flight.Location value) { + if (locationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLocationIsMutable(); + location_.add(value); + onChanged(); + } else { + locationBuilder_.addMessage(value); + } + return this; + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public Builder addLocation( + int index, org.apache.arrow.flight.impl.Flight.Location value) { + if (locationBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLocationIsMutable(); + location_.add(index, value); + onChanged(); + } else { + locationBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public Builder addLocation( + org.apache.arrow.flight.impl.Flight.Location.Builder builderForValue) { + if (locationBuilder_ == null) { + ensureLocationIsMutable(); + location_.add(builderForValue.build()); + onChanged(); + } else { + locationBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public Builder addLocation( + int index, org.apache.arrow.flight.impl.Flight.Location.Builder builderForValue) { + if (locationBuilder_ == null) { + ensureLocationIsMutable(); + location_.add(index, builderForValue.build()); + onChanged(); + } else { + locationBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public Builder addAllLocation( + java.lang.Iterable values) { + if (locationBuilder_ == null) { + ensureLocationIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, location_); + onChanged(); + } else { + locationBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public Builder clearLocation() { + if (locationBuilder_ == null) { + location_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + locationBuilder_.clear(); + } + return this; + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public Builder removeLocation(int index) { + if (locationBuilder_ == null) { + ensureLocationIsMutable(); + location_.remove(index); + onChanged(); + } else { + locationBuilder_.remove(index); + } + return this; + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public org.apache.arrow.flight.impl.Flight.Location.Builder getLocationBuilder( + int index) { + return getLocationFieldBuilder().getBuilder(index); + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public org.apache.arrow.flight.impl.Flight.LocationOrBuilder getLocationOrBuilder( + int index) { + if (locationBuilder_ == null) { + return location_.get(index); } else { + return locationBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public java.util.List + getLocationOrBuilderList() { + if (locationBuilder_ != null) { + return locationBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(location_); + } + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public org.apache.arrow.flight.impl.Flight.Location.Builder addLocationBuilder() { + return getLocationFieldBuilder().addBuilder( + org.apache.arrow.flight.impl.Flight.Location.getDefaultInstance()); + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public org.apache.arrow.flight.impl.Flight.Location.Builder addLocationBuilder( + int index) { + return getLocationFieldBuilder().addBuilder( + index, org.apache.arrow.flight.impl.Flight.Location.getDefaultInstance()); + } + /** + *
+       *
+       * A list of URIs where this ticket can be redeemed via DoGet().
+       *
+       * If the list is empty, the expectation is that the ticket can only
+       * be redeemed on the current service where the ticket was
+       * generated.
+       *
+       * If the list is not empty, the expectation is that the ticket can be
+       * redeemed at any of the locations, and that the data returned will be
+       * equivalent. In this case, the ticket may only be redeemed at one of the
+       * given locations, and not (necessarily) on the current service. If one
+       * of the given locations is "arrow-flight-reuse-connection://?", the
+       * client may redeem the ticket on the service where the ticket was
+       * generated (i.e., the same as above), in addition to the other
+       * locations. (This URI was chosen to maximize compatibility, as 'scheme:'
+       * or 'scheme://' are not accepted by Java's java.net.URI.)
+       *
+       * In other words, an application can use multiple locations to
+       * represent redundant and/or load balanced services.
+       * 
+ * + * repeated .arrow.flight.protocol.Location location = 2; + */ + public java.util.List + getLocationBuilderList() { + return getLocationFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.Location, org.apache.arrow.flight.impl.Flight.Location.Builder, org.apache.arrow.flight.impl.Flight.LocationOrBuilder> + getLocationFieldBuilder() { + if (locationBuilder_ == null) { + locationBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.Location, org.apache.arrow.flight.impl.Flight.Location.Builder, org.apache.arrow.flight.impl.Flight.LocationOrBuilder>( + location_, + ((bitField0_ & 0x00000002) != 0), + getParentForChildren(), + isClean()); + location_ = null; + } + return locationBuilder_; + } + + private com.google.protobuf.Timestamp expirationTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> expirationTimeBuilder_; + /** + *
+       *
+       * Expiration time of this stream. If present, clients may assume
+       * they can retry DoGet requests. Otherwise, it is
+       * application-defined whether DoGet requests may be retried.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + * @return Whether the expirationTime field is set. + */ + public boolean hasExpirationTime() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + *
+       *
+       * Expiration time of this stream. If present, clients may assume
+       * they can retry DoGet requests. Otherwise, it is
+       * application-defined whether DoGet requests may be retried.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + * @return The expirationTime. + */ + public com.google.protobuf.Timestamp getExpirationTime() { + if (expirationTimeBuilder_ == null) { + return expirationTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; + } else { + return expirationTimeBuilder_.getMessage(); + } + } + /** + *
+       *
+       * Expiration time of this stream. If present, clients may assume
+       * they can retry DoGet requests. Otherwise, it is
+       * application-defined whether DoGet requests may be retried.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + */ + public Builder setExpirationTime(com.google.protobuf.Timestamp value) { + if (expirationTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + expirationTime_ = value; + } else { + expirationTimeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       *
+       * Expiration time of this stream. If present, clients may assume
+       * they can retry DoGet requests. Otherwise, it is
+       * application-defined whether DoGet requests may be retried.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + */ + public Builder setExpirationTime( + com.google.protobuf.Timestamp.Builder builderForValue) { + if (expirationTimeBuilder_ == null) { + expirationTime_ = builderForValue.build(); + } else { + expirationTimeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       *
+       * Expiration time of this stream. If present, clients may assume
+       * they can retry DoGet requests. Otherwise, it is
+       * application-defined whether DoGet requests may be retried.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + */ + public Builder mergeExpirationTime(com.google.protobuf.Timestamp value) { + if (expirationTimeBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0) && + expirationTime_ != null && + expirationTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + getExpirationTimeBuilder().mergeFrom(value); + } else { + expirationTime_ = value; + } + } else { + expirationTimeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       *
+       * Expiration time of this stream. If present, clients may assume
+       * they can retry DoGet requests. Otherwise, it is
+       * application-defined whether DoGet requests may be retried.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + */ + public Builder clearExpirationTime() { + bitField0_ = (bitField0_ & ~0x00000004); + expirationTime_ = null; + if (expirationTimeBuilder_ != null) { + expirationTimeBuilder_.dispose(); + expirationTimeBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       *
+       * Expiration time of this stream. If present, clients may assume
+       * they can retry DoGet requests. Otherwise, it is
+       * application-defined whether DoGet requests may be retried.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + */ + public com.google.protobuf.Timestamp.Builder getExpirationTimeBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getExpirationTimeFieldBuilder().getBuilder(); + } + /** + *
+       *
+       * Expiration time of this stream. If present, clients may assume
+       * they can retry DoGet requests. Otherwise, it is
+       * application-defined whether DoGet requests may be retried.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + */ + public com.google.protobuf.TimestampOrBuilder getExpirationTimeOrBuilder() { + if (expirationTimeBuilder_ != null) { + return expirationTimeBuilder_.getMessageOrBuilder(); + } else { + return expirationTime_ == null ? + com.google.protobuf.Timestamp.getDefaultInstance() : expirationTime_; + } + } + /** + *
+       *
+       * Expiration time of this stream. If present, clients may assume
+       * they can retry DoGet requests. Otherwise, it is
+       * application-defined whether DoGet requests may be retried.
+       * 
+ * + * .google.protobuf.Timestamp expiration_time = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> + getExpirationTimeFieldBuilder() { + if (expirationTimeBuilder_ == null) { + expirationTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( + getExpirationTime(), + getParentForChildren(), + isClean()); + expirationTime_ = null; + } + return expirationTimeBuilder_; + } + + private com.google.protobuf.ByteString appMetadata_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       *
+       * Application-defined metadata.
+       *
+       * There is no inherent or required relationship between this
+       * and the app_metadata fields in the FlightInfo or resulting
+       * FlightData messages. Since this metadata is application-defined,
+       * a given application could define there to be a relationship,
+       * but there is none required by the spec.
+       * 
+ * + * bytes app_metadata = 4; + * @return The appMetadata. + */ + @java.lang.Override + public com.google.protobuf.ByteString getAppMetadata() { + return appMetadata_; + } + /** + *
+       *
+       * Application-defined metadata.
+       *
+       * There is no inherent or required relationship between this
+       * and the app_metadata fields in the FlightInfo or resulting
+       * FlightData messages. Since this metadata is application-defined,
+       * a given application could define there to be a relationship,
+       * but there is none required by the spec.
+       * 
+ * + * bytes app_metadata = 4; + * @param value The appMetadata to set. + * @return This builder for chaining. + */ + public Builder setAppMetadata(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + appMetadata_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       *
+       * Application-defined metadata.
+       *
+       * There is no inherent or required relationship between this
+       * and the app_metadata fields in the FlightInfo or resulting
+       * FlightData messages. Since this metadata is application-defined,
+       * a given application could define there to be a relationship,
+       * but there is none required by the spec.
+       * 
+ * + * bytes app_metadata = 4; + * @return This builder for chaining. + */ + public Builder clearAppMetadata() { + bitField0_ = (bitField0_ & ~0x00000008); + appMetadata_ = getDefaultInstance().getAppMetadata(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.FlightEndpoint) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.FlightEndpoint) + private static final org.apache.arrow.flight.impl.Flight.FlightEndpoint DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.FlightEndpoint(); + } + + public static org.apache.arrow.flight.impl.Flight.FlightEndpoint getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public FlightEndpoint parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + 
public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightEndpoint getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface LocationOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.Location) + com.google.protobuf.MessageOrBuilder { + + /** + * string uri = 1; + * @return The uri. + */ + java.lang.String getUri(); + /** + * string uri = 1; + * @return The bytes for uri. + */ + com.google.protobuf.ByteString + getUriBytes(); + } + /** + *
+   *
+   * A location where a Flight service will accept retrieval of a particular
+   * stream given a ticket.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.Location} + */ + public static final class Location extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.Location) + LocationOrBuilder { + private static final long serialVersionUID = 0L; + // Use Location.newBuilder() to construct. + private Location(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Location() { + uri_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Location(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Location_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Location_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Location.class, org.apache.arrow.flight.impl.Flight.Location.Builder.class); + } + + public static final int URI_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object uri_ = ""; + /** + * string uri = 1; + * @return The uri. + */ + @java.lang.Override + public java.lang.String getUri() { + java.lang.Object ref = uri_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + uri_ = s; + return s; + } + } + /** + * string uri = 1; + * @return The bytes for uri. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getUriBytes() { + java.lang.Object ref = uri_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + uri_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uri_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uri_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.Location)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.Location other = (org.apache.arrow.flight.impl.Flight.Location) obj; + + if (!getUri() + .equals(other.getUri())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * 
hash) + URI_FIELD_NUMBER; + hash = (53 * hash) + getUri().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.Location parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Location parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Location parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Location parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Location parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Location parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Location parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Location parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.Location parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.Location parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Location parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Location parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.Location prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * A location where a Flight service will accept retrieval of a particular
+     * stream given a ticket.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.Location} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.Location) + org.apache.arrow.flight.impl.Flight.LocationOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Location_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Location_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Location.class, org.apache.arrow.flight.impl.Flight.Location.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.Location.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + uri_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Location_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Location getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.Location.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Location build() { + org.apache.arrow.flight.impl.Flight.Location result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Location buildPartial() { + 
org.apache.arrow.flight.impl.Flight.Location result = new org.apache.arrow.flight.impl.Flight.Location(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.Location result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.uri_ = uri_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.Location) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.Location)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.Location other) { + if (other == org.apache.arrow.flight.impl.Flight.Location.getDefaultInstance()) return this; + if (!other.getUri().isEmpty()) { + uri_ = other.uri_; + bitField0_ |= 0x00000001; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + uri_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + 
private int bitField0_; + + private java.lang.Object uri_ = ""; + /** + * string uri = 1; + * @return The uri. + */ + public java.lang.String getUri() { + java.lang.Object ref = uri_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + uri_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string uri = 1; + * @return The bytes for uri. + */ + public com.google.protobuf.ByteString + getUriBytes() { + java.lang.Object ref = uri_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + uri_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string uri = 1; + * @param value The uri to set. + * @return This builder for chaining. + */ + public Builder setUri( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + uri_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * string uri = 1; + * @return This builder for chaining. + */ + public Builder clearUri() { + uri_ = getDefaultInstance().getUri(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * string uri = 1; + * @param value The bytes for uri to set. + * @return This builder for chaining. 
+ */ + public Builder setUriBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + uri_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.Location) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.Location) + private static final org.apache.arrow.flight.impl.Flight.Location DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.Location(); + } + + public static org.apache.arrow.flight.impl.Flight.Location getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Location parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Location getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface TicketOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.Ticket) + com.google.protobuf.MessageOrBuilder { + + /** + * bytes ticket = 1; + * @return The ticket. + */ + com.google.protobuf.ByteString getTicket(); + } + /** + *
+   *
+   * An opaque identifier that the service can use to retrieve a particular
+   * portion of a stream.
+   *
+   * Tickets are meant to be single use. It is an error/application-defined
+   * behavior to reuse a ticket.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.Ticket} + */ + public static final class Ticket extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.Ticket) + TicketOrBuilder { + private static final long serialVersionUID = 0L; + // Use Ticket.newBuilder() to construct. + private Ticket(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Ticket() { + ticket_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Ticket(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Ticket_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Ticket_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Ticket.class, org.apache.arrow.flight.impl.Flight.Ticket.Builder.class); + } + + public static final int TICKET_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString ticket_ = com.google.protobuf.ByteString.EMPTY; + /** + * bytes ticket = 1; + * @return The ticket. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString getTicket() { + return ticket_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!ticket_.isEmpty()) { + output.writeBytes(1, ticket_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!ticket_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, ticket_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.Ticket)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.Ticket other = (org.apache.arrow.flight.impl.Flight.Ticket) obj; + + if (!getTicket() + .equals(other.getTicket())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TICKET_FIELD_NUMBER; + hash = (53 * hash) + getTicket().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.Ticket parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); 
+ } + public static org.apache.arrow.flight.impl.Flight.Ticket parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Ticket parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Ticket parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Ticket parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.Ticket parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Ticket parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Ticket parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.Ticket parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + 
public static org.apache.arrow.flight.impl.Flight.Ticket parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.Ticket parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.Ticket parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.Ticket prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * An opaque identifier that the service can use to retrieve a particular
+     * portion of a stream.
+     *
+     * Tickets are meant to be single use. It is an error/application-defined
+     * behavior to reuse a ticket.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.Ticket} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.Ticket) + org.apache.arrow.flight.impl.Flight.TicketOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Ticket_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Ticket_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.Ticket.class, org.apache.arrow.flight.impl.Flight.Ticket.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.Ticket.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + ticket_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_Ticket_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Ticket getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.Ticket.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Ticket build() { + org.apache.arrow.flight.impl.Flight.Ticket result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Ticket 
buildPartial() { + org.apache.arrow.flight.impl.Flight.Ticket result = new org.apache.arrow.flight.impl.Flight.Ticket(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.Ticket result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.ticket_ = ticket_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.Ticket) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.Ticket)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.Ticket other) { + if (other == org.apache.arrow.flight.impl.Flight.Ticket.getDefaultInstance()) return this; + if (other.getTicket() != com.google.protobuf.ByteString.EMPTY) { + setTicket(other.getTicket()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + ticket_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + 
private int bitField0_; + + private com.google.protobuf.ByteString ticket_ = com.google.protobuf.ByteString.EMPTY; + /** + * bytes ticket = 1; + * @return The ticket. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTicket() { + return ticket_; + } + /** + * bytes ticket = 1; + * @param value The ticket to set. + * @return This builder for chaining. + */ + public Builder setTicket(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + ticket_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * bytes ticket = 1; + * @return This builder for chaining. + */ + public Builder clearTicket() { + bitField0_ = (bitField0_ & ~0x00000001); + ticket_ = getDefaultInstance().getTicket(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.Ticket) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.Ticket) + private static final org.apache.arrow.flight.impl.Flight.Ticket DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.Ticket(); + } + + public static org.apache.arrow.flight.impl.Flight.Ticket getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Ticket parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + 
builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.Ticket getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface FlightDataOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.FlightData) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * The descriptor of the data. This is only relevant when a client is
+     * starting a new DoPut stream.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + * @return Whether the flightDescriptor field is set. + */ + boolean hasFlightDescriptor(); + /** + *
+     *
+     * The descriptor of the data. This is only relevant when a client is
+     * starting a new DoPut stream.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + * @return The flightDescriptor. + */ + org.apache.arrow.flight.impl.Flight.FlightDescriptor getFlightDescriptor(); + /** + *
+     *
+     * The descriptor of the data. This is only relevant when a client is
+     * starting a new DoPut stream.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + */ + org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder getFlightDescriptorOrBuilder(); + + /** + *
+     *
+     * Header for message data as described in Message.fbs::Message.
+     * 
+ * + * bytes data_header = 2; + * @return The dataHeader. + */ + com.google.protobuf.ByteString getDataHeader(); + + /** + *
+     *
+     * Application-defined metadata.
+     * 
+ * + * bytes app_metadata = 3; + * @return The appMetadata. + */ + com.google.protobuf.ByteString getAppMetadata(); + + /** + *
+     *
+     * The actual batch of Arrow data. Preferably handled with minimal-copies
+     * coming last in the definition to help with sidecar patterns (it is
+     * expected that some implementations will fetch this field off the wire
+     * with specialized code to avoid extra memory copies).
+     * 
+ * + * bytes data_body = 1000; + * @return The dataBody. + */ + com.google.protobuf.ByteString getDataBody(); + } + /** + *
+   *
+   * A batch of Arrow data as part of a stream of batches.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.FlightData} + */ + public static final class FlightData extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.FlightData) + FlightDataOrBuilder { + private static final long serialVersionUID = 0L; + // Use FlightData.newBuilder() to construct. + private FlightData(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private FlightData() { + dataHeader_ = com.google.protobuf.ByteString.EMPTY; + appMetadata_ = com.google.protobuf.ByteString.EMPTY; + dataBody_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new FlightData(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightData_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightData_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.FlightData.class, org.apache.arrow.flight.impl.Flight.FlightData.Builder.class); + } + + public static final int FLIGHT_DESCRIPTOR_FIELD_NUMBER = 1; + private org.apache.arrow.flight.impl.Flight.FlightDescriptor flightDescriptor_; + /** + *
+     *
+     * The descriptor of the data. This is only relevant when a client is
+     * starting a new DoPut stream.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + * @return Whether the flightDescriptor field is set. + */ + @java.lang.Override + public boolean hasFlightDescriptor() { + return flightDescriptor_ != null; + } + /** + *
+     *
+     * The descriptor of the data. This is only relevant when a client is
+     * starting a new DoPut stream.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + * @return The flightDescriptor. + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightDescriptor getFlightDescriptor() { + return flightDescriptor_ == null ? org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } + /** + *
+     *
+     * The descriptor of the data. This is only relevant when a client is
+     * starting a new DoPut stream.
+     * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder getFlightDescriptorOrBuilder() { + return flightDescriptor_ == null ? org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } + + public static final int DATA_HEADER_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString dataHeader_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     *
+     * Header for message data as described in Message.fbs::Message.
+     * 
+ * + * bytes data_header = 2; + * @return The dataHeader. + */ + @java.lang.Override + public com.google.protobuf.ByteString getDataHeader() { + return dataHeader_; + } + + public static final int APP_METADATA_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString appMetadata_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     *
+     * Application-defined metadata.
+     * 
+ * + * bytes app_metadata = 3; + * @return The appMetadata. + */ + @java.lang.Override + public com.google.protobuf.ByteString getAppMetadata() { + return appMetadata_; + } + + public static final int DATA_BODY_FIELD_NUMBER = 1000; + private com.google.protobuf.ByteString dataBody_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     *
+     * The actual batch of Arrow data. Preferably handled with minimal-copies
+     * coming last in the definition to help with sidecar patterns (it is
+     * expected that some implementations will fetch this field off the wire
+     * with specialized code to avoid extra memory copies).
+     * 
+ * + * bytes data_body = 1000; + * @return The dataBody. + */ + @java.lang.Override + public com.google.protobuf.ByteString getDataBody() { + return dataBody_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (flightDescriptor_ != null) { + output.writeMessage(1, getFlightDescriptor()); + } + if (!dataHeader_.isEmpty()) { + output.writeBytes(2, dataHeader_); + } + if (!appMetadata_.isEmpty()) { + output.writeBytes(3, appMetadata_); + } + if (!dataBody_.isEmpty()) { + output.writeBytes(1000, dataBody_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (flightDescriptor_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getFlightDescriptor()); + } + if (!dataHeader_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, dataHeader_); + } + if (!appMetadata_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, appMetadata_); + } + if (!dataBody_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1000, dataBody_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.FlightData)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.FlightData other = (org.apache.arrow.flight.impl.Flight.FlightData) obj; + + if 
(hasFlightDescriptor() != other.hasFlightDescriptor()) return false; + if (hasFlightDescriptor()) { + if (!getFlightDescriptor() + .equals(other.getFlightDescriptor())) return false; + } + if (!getDataHeader() + .equals(other.getDataHeader())) return false; + if (!getAppMetadata() + .equals(other.getAppMetadata())) return false; + if (!getDataBody() + .equals(other.getDataBody())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasFlightDescriptor()) { + hash = (37 * hash) + FLIGHT_DESCRIPTOR_FIELD_NUMBER; + hash = (53 * hash) + getFlightDescriptor().hashCode(); + } + hash = (37 * hash) + DATA_HEADER_FIELD_NUMBER; + hash = (53 * hash) + getDataHeader().hashCode(); + hash = (37 * hash) + APP_METADATA_FIELD_NUMBER; + hash = (53 * hash) + getAppMetadata().hashCode(); + hash = (37 * hash) + DATA_BODY_FIELD_NUMBER; + hash = (53 * hash) + getDataBody().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.FlightData parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightData parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightData parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightData parseFrom( + 
com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightData parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.FlightData parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightData parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.FlightData parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.FlightData parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.FlightData parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.FlightData parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.FlightData parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.FlightData prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * A batch of Arrow data as part of a stream of batches.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.FlightData} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.FlightData) + org.apache.arrow.flight.impl.Flight.FlightDataOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightData_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightData_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.FlightData.class, org.apache.arrow.flight.impl.Flight.FlightData.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.FlightData.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + flightDescriptor_ = null; + if (flightDescriptorBuilder_ != null) { + flightDescriptorBuilder_.dispose(); + flightDescriptorBuilder_ = null; + } + dataHeader_ = com.google.protobuf.ByteString.EMPTY; + appMetadata_ = com.google.protobuf.ByteString.EMPTY; + dataBody_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_FlightData_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightData getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.FlightData.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.flight.impl.Flight.FlightData build() { + org.apache.arrow.flight.impl.Flight.FlightData result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightData buildPartial() { + org.apache.arrow.flight.impl.Flight.FlightData result = new org.apache.arrow.flight.impl.Flight.FlightData(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.FlightData result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.flightDescriptor_ = flightDescriptorBuilder_ == null + ? flightDescriptor_ + : flightDescriptorBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.dataHeader_ = dataHeader_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.appMetadata_ = appMetadata_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.dataBody_ = dataBody_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.FlightData) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.FlightData)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.FlightData other) { + if (other == org.apache.arrow.flight.impl.Flight.FlightData.getDefaultInstance()) return this; + if (other.hasFlightDescriptor()) { + mergeFlightDescriptor(other.getFlightDescriptor()); + } + if (other.getDataHeader() != com.google.protobuf.ByteString.EMPTY) { + setDataHeader(other.getDataHeader()); + } + if (other.getAppMetadata() != com.google.protobuf.ByteString.EMPTY) { + setAppMetadata(other.getAppMetadata()); + } + if (other.getDataBody() != com.google.protobuf.ByteString.EMPTY) { + 
setDataBody(other.getDataBody()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getFlightDescriptorFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + dataHeader_ = input.readBytes(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + appMetadata_ = input.readBytes(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 8002: { + dataBody_ = input.readBytes(); + bitField0_ |= 0x00000008; + break; + } // case 8002 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.flight.impl.Flight.FlightDescriptor flightDescriptor_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder, org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder> flightDescriptorBuilder_; + /** + *
+       *
+       * The descriptor of the data. This is only relevant when a client is
+       * starting a new DoPut stream.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + * @return Whether the flightDescriptor field is set. + */ + public boolean hasFlightDescriptor() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       *
+       * The descriptor of the data. This is only relevant when a client is
+       * starting a new DoPut stream.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + * @return The flightDescriptor. + */ + public org.apache.arrow.flight.impl.Flight.FlightDescriptor getFlightDescriptor() { + if (flightDescriptorBuilder_ == null) { + return flightDescriptor_ == null ? org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } else { + return flightDescriptorBuilder_.getMessage(); + } + } + /** + *
+       *
+       * The descriptor of the data. This is only relevant when a client is
+       * starting a new DoPut stream.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + */ + public Builder setFlightDescriptor(org.apache.arrow.flight.impl.Flight.FlightDescriptor value) { + if (flightDescriptorBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + flightDescriptor_ = value; + } else { + flightDescriptorBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor of the data. This is only relevant when a client is
+       * starting a new DoPut stream.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + */ + public Builder setFlightDescriptor( + org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder builderForValue) { + if (flightDescriptorBuilder_ == null) { + flightDescriptor_ = builderForValue.build(); + } else { + flightDescriptorBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor of the data. This is only relevant when a client is
+       * starting a new DoPut stream.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + */ + public Builder mergeFlightDescriptor(org.apache.arrow.flight.impl.Flight.FlightDescriptor value) { + if (flightDescriptorBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + flightDescriptor_ != null && + flightDescriptor_ != org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance()) { + getFlightDescriptorBuilder().mergeFrom(value); + } else { + flightDescriptor_ = value; + } + } else { + flightDescriptorBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor of the data. This is only relevant when a client is
+       * starting a new DoPut stream.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + */ + public Builder clearFlightDescriptor() { + bitField0_ = (bitField0_ & ~0x00000001); + flightDescriptor_ = null; + if (flightDescriptorBuilder_ != null) { + flightDescriptorBuilder_.dispose(); + flightDescriptorBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       *
+       * The descriptor of the data. This is only relevant when a client is
+       * starting a new DoPut stream.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + */ + public org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder getFlightDescriptorBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getFlightDescriptorFieldBuilder().getBuilder(); + } + /** + *
+       *
+       * The descriptor of the data. This is only relevant when a client is
+       * starting a new DoPut stream.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + */ + public org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder getFlightDescriptorOrBuilder() { + if (flightDescriptorBuilder_ != null) { + return flightDescriptorBuilder_.getMessageOrBuilder(); + } else { + return flightDescriptor_ == null ? + org.apache.arrow.flight.impl.Flight.FlightDescriptor.getDefaultInstance() : flightDescriptor_; + } + } + /** + *
+       *
+       * The descriptor of the data. This is only relevant when a client is
+       * starting a new DoPut stream.
+       * 
+ * + * .arrow.flight.protocol.FlightDescriptor flight_descriptor = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder, org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder> + getFlightDescriptorFieldBuilder() { + if (flightDescriptorBuilder_ == null) { + flightDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.FlightDescriptor, org.apache.arrow.flight.impl.Flight.FlightDescriptor.Builder, org.apache.arrow.flight.impl.Flight.FlightDescriptorOrBuilder>( + getFlightDescriptor(), + getParentForChildren(), + isClean()); + flightDescriptor_ = null; + } + return flightDescriptorBuilder_; + } + + private com.google.protobuf.ByteString dataHeader_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       *
+       * Header for message data as described in Message.fbs::Message.
+       * 
+ * + * bytes data_header = 2; + * @return The dataHeader. + */ + @java.lang.Override + public com.google.protobuf.ByteString getDataHeader() { + return dataHeader_; + } + /** + *
+       *
+       * Header for message data as described in Message.fbs::Message.
+       * 
+ * + * bytes data_header = 2; + * @param value The dataHeader to set. + * @return This builder for chaining. + */ + public Builder setDataHeader(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + dataHeader_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * Header for message data as described in Message.fbs::Message.
+       * 
+ * + * bytes data_header = 2; + * @return This builder for chaining. + */ + public Builder clearDataHeader() { + bitField0_ = (bitField0_ & ~0x00000002); + dataHeader_ = getDefaultInstance().getDataHeader(); + onChanged(); + return this; + } + + private com.google.protobuf.ByteString appMetadata_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       *
+       * Application-defined metadata.
+       * 
+ * + * bytes app_metadata = 3; + * @return The appMetadata. + */ + @java.lang.Override + public com.google.protobuf.ByteString getAppMetadata() { + return appMetadata_; + } + /** + *
+       *
+       * Application-defined metadata.
+       * 
+ * + * bytes app_metadata = 3; + * @param value The appMetadata to set. + * @return This builder for chaining. + */ + public Builder setAppMetadata(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + appMetadata_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       *
+       * Application-defined metadata.
+       * 
+ * + * bytes app_metadata = 3; + * @return This builder for chaining. + */ + public Builder clearAppMetadata() { + bitField0_ = (bitField0_ & ~0x00000004); + appMetadata_ = getDefaultInstance().getAppMetadata(); + onChanged(); + return this; + } + + private com.google.protobuf.ByteString dataBody_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       *
+       * The actual batch of Arrow data. Preferably handled with minimal-copies
+       * coming last in the definition to help with sidecar patterns (it is
+       * expected that some implementations will fetch this field off the wire
+       * with specialized code to avoid extra memory copies).
+       * 
+ * + * bytes data_body = 1000; + * @return The dataBody. + */ + @java.lang.Override + public com.google.protobuf.ByteString getDataBody() { + return dataBody_; + } + /** + *
+       *
+       * The actual batch of Arrow data. Preferably handled with minimal-copies
+       * coming last in the definition to help with sidecar patterns (it is
+       * expected that some implementations will fetch this field off the wire
+       * with specialized code to avoid extra memory copies).
+       * 
+ * + * bytes data_body = 1000; + * @param value The dataBody to set. + * @return This builder for chaining. + */ + public Builder setDataBody(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + dataBody_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       *
+       * The actual batch of Arrow data. Preferably handled with minimal-copies
+       * coming last in the definition to help with sidecar patterns (it is
+       * expected that some implementations will fetch this field off the wire
+       * with specialized code to avoid extra memory copies).
+       * 
+ * + * bytes data_body = 1000; + * @return This builder for chaining. + */ + public Builder clearDataBody() { + bitField0_ = (bitField0_ & ~0x00000008); + dataBody_ = getDefaultInstance().getDataBody(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.FlightData) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.FlightData) + private static final org.apache.arrow.flight.impl.Flight.FlightData DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.FlightData(); + } + + public static org.apache.arrow.flight.impl.Flight.FlightData getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public FlightData parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static 
com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.FlightData getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface PutResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.PutResult) + com.google.protobuf.MessageOrBuilder { + + /** + * bytes app_metadata = 1; + * @return The appMetadata. + */ + com.google.protobuf.ByteString getAppMetadata(); + } + /** + *
+   **
+   * The response message associated with the submission of a DoPut.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.PutResult} + */ + public static final class PutResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.PutResult) + PutResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use PutResult.newBuilder() to construct. + private PutResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private PutResult() { + appMetadata_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new PutResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_PutResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_PutResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.PutResult.class, org.apache.arrow.flight.impl.Flight.PutResult.Builder.class); + } + + public static final int APP_METADATA_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString appMetadata_ = com.google.protobuf.ByteString.EMPTY; + /** + * bytes app_metadata = 1; + * @return The appMetadata. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString getAppMetadata() { + return appMetadata_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!appMetadata_.isEmpty()) { + output.writeBytes(1, appMetadata_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!appMetadata_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, appMetadata_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.PutResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.PutResult other = (org.apache.arrow.flight.impl.Flight.PutResult) obj; + + if (!getAppMetadata() + .equals(other.getAppMetadata())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + APP_METADATA_FIELD_NUMBER; + hash = (53 * hash) + getAppMetadata().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.PutResult parseFrom( + java.nio.ByteBuffer data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.PutResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.PutResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.PutResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.PutResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.PutResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.PutResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.PutResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.PutResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { 
+ return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.PutResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.PutResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.PutResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.PutResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     **
+     * The response message associated with the submission of a DoPut.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.PutResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.PutResult) + org.apache.arrow.flight.impl.Flight.PutResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_PutResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_PutResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.PutResult.class, org.apache.arrow.flight.impl.Flight.PutResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.PutResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + appMetadata_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_PutResult_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.PutResult getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.PutResult.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.PutResult build() { + org.apache.arrow.flight.impl.Flight.PutResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public 
org.apache.arrow.flight.impl.Flight.PutResult buildPartial() { + org.apache.arrow.flight.impl.Flight.PutResult result = new org.apache.arrow.flight.impl.Flight.PutResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.PutResult result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.appMetadata_ = appMetadata_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.PutResult) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.PutResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.PutResult other) { + if (other == org.apache.arrow.flight.impl.Flight.PutResult.getDefaultInstance()) return this; + if (other.getAppMetadata() != com.google.protobuf.ByteString.EMPTY) { + setAppMetadata(other.getAppMetadata()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + appMetadata_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) 
{ + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString appMetadata_ = com.google.protobuf.ByteString.EMPTY; + /** + * bytes app_metadata = 1; + * @return The appMetadata. + */ + @java.lang.Override + public com.google.protobuf.ByteString getAppMetadata() { + return appMetadata_; + } + /** + * bytes app_metadata = 1; + * @param value The appMetadata to set. + * @return This builder for chaining. + */ + public Builder setAppMetadata(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + appMetadata_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * bytes app_metadata = 1; + * @return This builder for chaining. + */ + public Builder clearAppMetadata() { + bitField0_ = (bitField0_ & ~0x00000001); + appMetadata_ = getDefaultInstance().getAppMetadata(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.PutResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.PutResult) + private static final org.apache.arrow.flight.impl.Flight.PutResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.PutResult(); + } + + public static org.apache.arrow.flight.impl.Flight.PutResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public PutResult parsePartialFrom( + com.google.protobuf.CodedInputStream 
input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.PutResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface SessionOptionValueOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.SessionOptionValue) + com.google.protobuf.MessageOrBuilder { + + /** + * string string_value = 1; + * @return Whether the stringValue field is set. + */ + boolean hasStringValue(); + /** + * string string_value = 1; + * @return The stringValue. + */ + java.lang.String getStringValue(); + /** + * string string_value = 1; + * @return The bytes for stringValue. + */ + com.google.protobuf.ByteString + getStringValueBytes(); + + /** + * bool bool_value = 2; + * @return Whether the boolValue field is set. + */ + boolean hasBoolValue(); + /** + * bool bool_value = 2; + * @return The boolValue. + */ + boolean getBoolValue(); + + /** + * sfixed64 int64_value = 3; + * @return Whether the int64Value field is set. + */ + boolean hasInt64Value(); + /** + * sfixed64 int64_value = 3; + * @return The int64Value. 
+ */ + long getInt64Value(); + + /** + * double double_value = 4; + * @return Whether the doubleValue field is set. + */ + boolean hasDoubleValue(); + /** + * double double_value = 4; + * @return The doubleValue. + */ + double getDoubleValue(); + + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + * @return Whether the stringListValue field is set. + */ + boolean hasStringListValue(); + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + * @return The stringListValue. + */ + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue getStringListValue(); + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + */ + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValueOrBuilder getStringListValueOrBuilder(); + + org.apache.arrow.flight.impl.Flight.SessionOptionValue.OptionValueCase getOptionValueCase(); + } + /** + *
+   *
+   * EXPERIMENTAL: Union of possible value types for a Session Option to be set to.
+   *
+   * By convention, an attempt to set a valueless SessionOptionValue should
+   * attempt to unset or clear the named option value on the server.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.SessionOptionValue} + */ + public static final class SessionOptionValue extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.SessionOptionValue) + SessionOptionValueOrBuilder { + private static final long serialVersionUID = 0L; + // Use SessionOptionValue.newBuilder() to construct. + private SessionOptionValue(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private SessionOptionValue() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new SessionOptionValue(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SessionOptionValue_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SessionOptionValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SessionOptionValue.class, org.apache.arrow.flight.impl.Flight.SessionOptionValue.Builder.class); + } + + public interface StringListValueOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.SessionOptionValue.StringListValue) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated string values = 1; + * @return A list containing the values. + */ + java.util.List + getValuesList(); + /** + * repeated string values = 1; + * @return The count of values. + */ + int getValuesCount(); + /** + * repeated string values = 1; + * @param index The index of the element to return. + * @return The values at the given index. 
+ */ + java.lang.String getValues(int index); + /** + * repeated string values = 1; + * @param index The index of the value to return. + * @return The bytes of the values at the given index. + */ + com.google.protobuf.ByteString + getValuesBytes(int index); + } + /** + * Protobuf type {@code arrow.flight.protocol.SessionOptionValue.StringListValue} + */ + public static final class StringListValue extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.SessionOptionValue.StringListValue) + StringListValueOrBuilder { + private static final long serialVersionUID = 0L; + // Use StringListValue.newBuilder() to construct. + private StringListValue(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private StringListValue() { + values_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new StringListValue(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SessionOptionValue_StringListValue_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SessionOptionValue_StringListValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.class, org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.Builder.class); + } + + public static final int VALUES_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private com.google.protobuf.LazyStringArrayList values_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + /** + * repeated string 
values = 1; + * @return A list containing the values. + */ + public com.google.protobuf.ProtocolStringList + getValuesList() { + return values_; + } + /** + * repeated string values = 1; + * @return The count of values. + */ + public int getValuesCount() { + return values_.size(); + } + /** + * repeated string values = 1; + * @param index The index of the element to return. + * @return The values at the given index. + */ + public java.lang.String getValues(int index) { + return values_.get(index); + } + /** + * repeated string values = 1; + * @param index The index of the value to return. + * @return The bytes of the values at the given index. + */ + public com.google.protobuf.ByteString + getValuesBytes(int index) { + return values_.getByteString(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < values_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, values_.getRaw(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < values_.size(); i++) { + dataSize += computeStringSizeNoTag(values_.getRaw(i)); + } + size += dataSize; + size += 1 * getValuesList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue)) { + return 
super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue other = (org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue) obj; + + if (!getValuesList() + .equals(other.getValuesList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getValuesCount() > 0) { + hash = (37 * hash) + VALUES_FIELD_NUMBER; + hash = (53 * hash) + getValuesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue parseFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code arrow.flight.protocol.SessionOptionValue.StringListValue} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.SessionOptionValue.StringListValue) + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SessionOptionValue_StringListValue_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SessionOptionValue_StringListValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.class, org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.Builder.class); + } + + // Construct using 
org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + values_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SessionOptionValue_StringListValue_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue build() { + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue buildPartial() { + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue result = new org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + values_.makeImmutable(); + result.values_ = values_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue) { 
+ return mergeFrom((org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue other) { + if (other == org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.getDefaultInstance()) return this; + if (!other.values_.isEmpty()) { + if (values_.isEmpty()) { + values_ = other.values_; + bitField0_ |= 0x00000001; + } else { + ensureValuesIsMutable(); + values_.addAll(other.values_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + ensureValuesIsMutable(); + values_.add(s); + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.LazyStringArrayList values_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + private void ensureValuesIsMutable() { + if (!values_.isModifiable()) { + values_ = new com.google.protobuf.LazyStringArrayList(values_); + } + bitField0_ |= 0x00000001; + } + /** + * repeated 
string values = 1; + * @return A list containing the values. + */ + public com.google.protobuf.ProtocolStringList + getValuesList() { + values_.makeImmutable(); + return values_; + } + /** + * repeated string values = 1; + * @return The count of values. + */ + public int getValuesCount() { + return values_.size(); + } + /** + * repeated string values = 1; + * @param index The index of the element to return. + * @return The values at the given index. + */ + public java.lang.String getValues(int index) { + return values_.get(index); + } + /** + * repeated string values = 1; + * @param index The index of the value to return. + * @return The bytes of the values at the given index. + */ + public com.google.protobuf.ByteString + getValuesBytes(int index) { + return values_.getByteString(index); + } + /** + * repeated string values = 1; + * @param index The index to set the value at. + * @param value The values to set. + * @return This builder for chaining. + */ + public Builder setValues( + int index, java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + ensureValuesIsMutable(); + values_.set(index, value); + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * repeated string values = 1; + * @param value The values to add. + * @return This builder for chaining. + */ + public Builder addValues( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + ensureValuesIsMutable(); + values_.add(value); + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * repeated string values = 1; + * @param values The values to add. + * @return This builder for chaining. + */ + public Builder addAllValues( + java.lang.Iterable values) { + ensureValuesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, values_); + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * repeated string values = 1; + * @return This builder for chaining. 
+ */ + public Builder clearValues() { + values_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001);; + onChanged(); + return this; + } + /** + * repeated string values = 1; + * @param value The bytes of the values to add. + * @return This builder for chaining. + */ + public Builder addValuesBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + ensureValuesIsMutable(); + values_.add(value); + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.SessionOptionValue.StringListValue) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.SessionOptionValue.StringListValue) + private static final org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue(); + } + + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public StringListValue parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private int optionValueCase_ = 0; + @SuppressWarnings("serial") + private java.lang.Object optionValue_; + public enum OptionValueCase + implements com.google.protobuf.Internal.EnumLite, + com.google.protobuf.AbstractMessage.InternalOneOfEnum { + STRING_VALUE(1), + BOOL_VALUE(2), + INT64_VALUE(3), + DOUBLE_VALUE(4), + STRING_LIST_VALUE(5), + OPTIONVALUE_NOT_SET(0); + private final int value; + private OptionValueCase(int value) { + this.value = value; + } + /** + * @param value The number of the enum to look for. + * @return The enum associated with the given number. + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated + public static OptionValueCase valueOf(int value) { + return forNumber(value); + } + + public static OptionValueCase forNumber(int value) { + switch (value) { + case 1: return STRING_VALUE; + case 2: return BOOL_VALUE; + case 3: return INT64_VALUE; + case 4: return DOUBLE_VALUE; + case 5: return STRING_LIST_VALUE; + case 0: return OPTIONVALUE_NOT_SET; + default: return null; + } + } + public int getNumber() { + return this.value; + } + }; + + public OptionValueCase + getOptionValueCase() { + return OptionValueCase.forNumber( + optionValueCase_); + } + + public static final int STRING_VALUE_FIELD_NUMBER = 1; + /** + * string string_value = 1; + * @return Whether the stringValue field is set. + */ + public boolean hasStringValue() { + return optionValueCase_ == 1; + } + /** + * string string_value = 1; + * @return The stringValue. + */ + public java.lang.String getStringValue() { + java.lang.Object ref = ""; + if (optionValueCase_ == 1) { + ref = optionValue_; + } + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (optionValueCase_ == 1) { + optionValue_ = s; + } + return s; + } + } + /** + * string string_value = 1; + * @return The bytes for stringValue. + */ + public com.google.protobuf.ByteString + getStringValueBytes() { + java.lang.Object ref = ""; + if (optionValueCase_ == 1) { + ref = optionValue_; + } + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + if (optionValueCase_ == 1) { + optionValue_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int BOOL_VALUE_FIELD_NUMBER = 2; + /** + * bool bool_value = 2; + * @return Whether the boolValue field is set. 
+ */ + @java.lang.Override + public boolean hasBoolValue() { + return optionValueCase_ == 2; + } + /** + * bool bool_value = 2; + * @return The boolValue. + */ + @java.lang.Override + public boolean getBoolValue() { + if (optionValueCase_ == 2) { + return (java.lang.Boolean) optionValue_; + } + return false; + } + + public static final int INT64_VALUE_FIELD_NUMBER = 3; + /** + * sfixed64 int64_value = 3; + * @return Whether the int64Value field is set. + */ + @java.lang.Override + public boolean hasInt64Value() { + return optionValueCase_ == 3; + } + /** + * sfixed64 int64_value = 3; + * @return The int64Value. + */ + @java.lang.Override + public long getInt64Value() { + if (optionValueCase_ == 3) { + return (java.lang.Long) optionValue_; + } + return 0L; + } + + public static final int DOUBLE_VALUE_FIELD_NUMBER = 4; + /** + * double double_value = 4; + * @return Whether the doubleValue field is set. + */ + @java.lang.Override + public boolean hasDoubleValue() { + return optionValueCase_ == 4; + } + /** + * double double_value = 4; + * @return The doubleValue. + */ + @java.lang.Override + public double getDoubleValue() { + if (optionValueCase_ == 4) { + return (java.lang.Double) optionValue_; + } + return 0D; + } + + public static final int STRING_LIST_VALUE_FIELD_NUMBER = 5; + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + * @return Whether the stringListValue field is set. + */ + @java.lang.Override + public boolean hasStringListValue() { + return optionValueCase_ == 5; + } + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + * @return The stringListValue. 
+ */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue getStringListValue() { + if (optionValueCase_ == 5) { + return (org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue) optionValue_; + } + return org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.getDefaultInstance(); + } + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValueOrBuilder getStringListValueOrBuilder() { + if (optionValueCase_ == 5) { + return (org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue) optionValue_; + } + return org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.getDefaultInstance(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (optionValueCase_ == 1) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, optionValue_); + } + if (optionValueCase_ == 2) { + output.writeBool( + 2, (boolean)((java.lang.Boolean) optionValue_)); + } + if (optionValueCase_ == 3) { + output.writeSFixed64( + 3, (long)((java.lang.Long) optionValue_)); + } + if (optionValueCase_ == 4) { + output.writeDouble( + 4, (double)((java.lang.Double) optionValue_)); + } + if (optionValueCase_ == 5) { + output.writeMessage(5, (org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue) optionValue_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; 
+ if (optionValueCase_ == 1) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, optionValue_); + } + if (optionValueCase_ == 2) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize( + 2, (boolean)((java.lang.Boolean) optionValue_)); + } + if (optionValueCase_ == 3) { + size += com.google.protobuf.CodedOutputStream + .computeSFixed64Size( + 3, (long)((java.lang.Long) optionValue_)); + } + if (optionValueCase_ == 4) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize( + 4, (double)((java.lang.Double) optionValue_)); + } + if (optionValueCase_ == 5) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, (org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue) optionValue_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.SessionOptionValue)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.SessionOptionValue other = (org.apache.arrow.flight.impl.Flight.SessionOptionValue) obj; + + if (!getOptionValueCase().equals(other.getOptionValueCase())) return false; + switch (optionValueCase_) { + case 1: + if (!getStringValue() + .equals(other.getStringValue())) return false; + break; + case 2: + if (getBoolValue() + != other.getBoolValue()) return false; + break; + case 3: + if (getInt64Value() + != other.getInt64Value()) return false; + break; + case 4: + if (java.lang.Double.doubleToLongBits(getDoubleValue()) + != java.lang.Double.doubleToLongBits( + other.getDoubleValue())) return false; + break; + case 5: + if (!getStringListValue() + .equals(other.getStringListValue())) return false; + break; + case 0: + default: + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + 
public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + switch (optionValueCase_) { + case 1: + hash = (37 * hash) + STRING_VALUE_FIELD_NUMBER; + hash = (53 * hash) + getStringValue().hashCode(); + break; + case 2: + hash = (37 * hash) + BOOL_VALUE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getBoolValue()); + break; + case 3: + hash = (37 * hash) + INT64_VALUE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getInt64Value()); + break; + case 4: + hash = (37 * hash) + DOUBLE_VALUE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getDoubleValue())); + break; + case 5: + hash = (37 * hash) + STRING_LIST_VALUE_FIELD_NUMBER; + hash = (53 * hash) + getStringListValue().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
org.apache.arrow.flight.impl.Flight.SessionOptionValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.SessionOptionValue prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * EXPERIMENTAL: Union of possible value types for a Session Option to be set to.
+     *
+     * By convention, an attempt to set a valueless SessionOptionValue should
+     * attempt to unset or clear the named option value on the server.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.SessionOptionValue} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.SessionOptionValue) + org.apache.arrow.flight.impl.Flight.SessionOptionValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SessionOptionValue_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SessionOptionValue_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SessionOptionValue.class, org.apache.arrow.flight.impl.Flight.SessionOptionValue.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.SessionOptionValue.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (stringListValueBuilder_ != null) { + stringListValueBuilder_.clear(); + } + optionValueCase_ = 0; + optionValue_ = null; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SessionOptionValue_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.SessionOptionValue.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue build() { + 
org.apache.arrow.flight.impl.Flight.SessionOptionValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue buildPartial() { + org.apache.arrow.flight.impl.Flight.SessionOptionValue result = new org.apache.arrow.flight.impl.Flight.SessionOptionValue(this); + if (bitField0_ != 0) { buildPartial0(result); } + buildPartialOneofs(result); + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.SessionOptionValue result) { + int from_bitField0_ = bitField0_; + } + + private void buildPartialOneofs(org.apache.arrow.flight.impl.Flight.SessionOptionValue result) { + result.optionValueCase_ = optionValueCase_; + result.optionValue_ = this.optionValue_; + if (optionValueCase_ == 5 && + stringListValueBuilder_ != null) { + result.optionValue_ = stringListValueBuilder_.build(); + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.SessionOptionValue) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.SessionOptionValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.SessionOptionValue other) { + if (other == org.apache.arrow.flight.impl.Flight.SessionOptionValue.getDefaultInstance()) return this; + switch (other.getOptionValueCase()) { + case STRING_VALUE: { + optionValueCase_ = 1; + optionValue_ = other.optionValue_; + onChanged(); + break; + } + case BOOL_VALUE: { + setBoolValue(other.getBoolValue()); + break; + } + case INT64_VALUE: { + setInt64Value(other.getInt64Value()); + break; + } + case DOUBLE_VALUE: { + setDoubleValue(other.getDoubleValue()); + break; + } + case STRING_LIST_VALUE: { + mergeStringListValue(other.getStringListValue()); + break; + } + case 
OPTIONVALUE_NOT_SET: { + break; + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + optionValueCase_ = 1; + optionValue_ = s; + break; + } // case 10 + case 16: { + optionValue_ = input.readBool(); + optionValueCase_ = 2; + break; + } // case 16 + case 25: { + optionValue_ = input.readSFixed64(); + optionValueCase_ = 3; + break; + } // case 25 + case 33: { + optionValue_ = input.readDouble(); + optionValueCase_ = 4; + break; + } // case 33 + case 42: { + input.readMessage( + getStringListValueFieldBuilder().getBuilder(), + extensionRegistry); + optionValueCase_ = 5; + break; + } // case 42 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int optionValueCase_ = 0; + private java.lang.Object optionValue_; + public OptionValueCase + getOptionValueCase() { + return OptionValueCase.forNumber( + optionValueCase_); + } + + public Builder clearOptionValue() { + optionValueCase_ = 0; + optionValue_ = null; + onChanged(); + return this; + } + + private int bitField0_; + + /** + * string string_value = 1; + * @return Whether the stringValue field is set. 
+ */ + @java.lang.Override + public boolean hasStringValue() { + return optionValueCase_ == 1; + } + /** + * string string_value = 1; + * @return The stringValue. + */ + @java.lang.Override + public java.lang.String getStringValue() { + java.lang.Object ref = ""; + if (optionValueCase_ == 1) { + ref = optionValue_; + } + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (optionValueCase_ == 1) { + optionValue_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string string_value = 1; + * @return The bytes for stringValue. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getStringValueBytes() { + java.lang.Object ref = ""; + if (optionValueCase_ == 1) { + ref = optionValue_; + } + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + if (optionValueCase_ == 1) { + optionValue_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string string_value = 1; + * @param value The stringValue to set. + * @return This builder for chaining. + */ + public Builder setStringValue( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + optionValueCase_ = 1; + optionValue_ = value; + onChanged(); + return this; + } + /** + * string string_value = 1; + * @return This builder for chaining. + */ + public Builder clearStringValue() { + if (optionValueCase_ == 1) { + optionValueCase_ = 0; + optionValue_ = null; + onChanged(); + } + return this; + } + /** + * string string_value = 1; + * @param value The bytes for stringValue to set. + * @return This builder for chaining. 
+ */ + public Builder setStringValueBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + optionValueCase_ = 1; + optionValue_ = value; + onChanged(); + return this; + } + + /** + * bool bool_value = 2; + * @return Whether the boolValue field is set. + */ + public boolean hasBoolValue() { + return optionValueCase_ == 2; + } + /** + * bool bool_value = 2; + * @return The boolValue. + */ + public boolean getBoolValue() { + if (optionValueCase_ == 2) { + return (java.lang.Boolean) optionValue_; + } + return false; + } + /** + * bool bool_value = 2; + * @param value The boolValue to set. + * @return This builder for chaining. + */ + public Builder setBoolValue(boolean value) { + + optionValueCase_ = 2; + optionValue_ = value; + onChanged(); + return this; + } + /** + * bool bool_value = 2; + * @return This builder for chaining. + */ + public Builder clearBoolValue() { + if (optionValueCase_ == 2) { + optionValueCase_ = 0; + optionValue_ = null; + onChanged(); + } + return this; + } + + /** + * sfixed64 int64_value = 3; + * @return Whether the int64Value field is set. + */ + public boolean hasInt64Value() { + return optionValueCase_ == 3; + } + /** + * sfixed64 int64_value = 3; + * @return The int64Value. + */ + public long getInt64Value() { + if (optionValueCase_ == 3) { + return (java.lang.Long) optionValue_; + } + return 0L; + } + /** + * sfixed64 int64_value = 3; + * @param value The int64Value to set. + * @return This builder for chaining. + */ + public Builder setInt64Value(long value) { + + optionValueCase_ = 3; + optionValue_ = value; + onChanged(); + return this; + } + /** + * sfixed64 int64_value = 3; + * @return This builder for chaining. 
+ */ + public Builder clearInt64Value() { + if (optionValueCase_ == 3) { + optionValueCase_ = 0; + optionValue_ = null; + onChanged(); + } + return this; + } + + /** + * double double_value = 4; + * @return Whether the doubleValue field is set. + */ + public boolean hasDoubleValue() { + return optionValueCase_ == 4; + } + /** + * double double_value = 4; + * @return The doubleValue. + */ + public double getDoubleValue() { + if (optionValueCase_ == 4) { + return (java.lang.Double) optionValue_; + } + return 0D; + } + /** + * double double_value = 4; + * @param value The doubleValue to set. + * @return This builder for chaining. + */ + public Builder setDoubleValue(double value) { + + optionValueCase_ = 4; + optionValue_ = value; + onChanged(); + return this; + } + /** + * double double_value = 4; + * @return This builder for chaining. + */ + public Builder clearDoubleValue() { + if (optionValueCase_ == 4) { + optionValueCase_ = 0; + optionValue_ = null; + onChanged(); + } + return this; + } + + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue, org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.Builder, org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValueOrBuilder> stringListValueBuilder_; + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + * @return Whether the stringListValue field is set. + */ + @java.lang.Override + public boolean hasStringListValue() { + return optionValueCase_ == 5; + } + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + * @return The stringListValue. 
+ */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue getStringListValue() { + if (stringListValueBuilder_ == null) { + if (optionValueCase_ == 5) { + return (org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue) optionValue_; + } + return org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.getDefaultInstance(); + } else { + if (optionValueCase_ == 5) { + return stringListValueBuilder_.getMessage(); + } + return org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.getDefaultInstance(); + } + } + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + */ + public Builder setStringListValue(org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue value) { + if (stringListValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + optionValue_ = value; + onChanged(); + } else { + stringListValueBuilder_.setMessage(value); + } + optionValueCase_ = 5; + return this; + } + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + */ + public Builder setStringListValue( + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.Builder builderForValue) { + if (stringListValueBuilder_ == null) { + optionValue_ = builderForValue.build(); + onChanged(); + } else { + stringListValueBuilder_.setMessage(builderForValue.build()); + } + optionValueCase_ = 5; + return this; + } + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + */ + public Builder mergeStringListValue(org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue value) { + if (stringListValueBuilder_ == null) { + if (optionValueCase_ == 5 && + optionValue_ != org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.getDefaultInstance()) { + optionValue_ = 
org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.newBuilder((org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue) optionValue_) + .mergeFrom(value).buildPartial(); + } else { + optionValue_ = value; + } + onChanged(); + } else { + if (optionValueCase_ == 5) { + stringListValueBuilder_.mergeFrom(value); + } else { + stringListValueBuilder_.setMessage(value); + } + } + optionValueCase_ = 5; + return this; + } + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + */ + public Builder clearStringListValue() { + if (stringListValueBuilder_ == null) { + if (optionValueCase_ == 5) { + optionValueCase_ = 0; + optionValue_ = null; + onChanged(); + } + } else { + if (optionValueCase_ == 5) { + optionValueCase_ = 0; + optionValue_ = null; + } + stringListValueBuilder_.clear(); + } + return this; + } + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + */ + public org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.Builder getStringListValueBuilder() { + return getStringListValueFieldBuilder().getBuilder(); + } + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValueOrBuilder getStringListValueOrBuilder() { + if ((optionValueCase_ == 5) && (stringListValueBuilder_ != null)) { + return stringListValueBuilder_.getMessageOrBuilder(); + } else { + if (optionValueCase_ == 5) { + return (org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue) optionValue_; + } + return org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.getDefaultInstance(); + } + } + /** + * .arrow.flight.protocol.SessionOptionValue.StringListValue string_list_value = 5; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue, 
org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.Builder, org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValueOrBuilder> + getStringListValueFieldBuilder() { + if (stringListValueBuilder_ == null) { + if (!(optionValueCase_ == 5)) { + optionValue_ = org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.getDefaultInstance(); + } + stringListValueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue, org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue.Builder, org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValueOrBuilder>( + (org.apache.arrow.flight.impl.Flight.SessionOptionValue.StringListValue) optionValue_, + getParentForChildren(), + isClean()); + optionValue_ = null; + } + optionValueCase_ = 5; + onChanged(); + return stringListValueBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.SessionOptionValue) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.SessionOptionValue) + private static final org.apache.arrow.flight.impl.Flight.SessionOptionValue DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.SessionOptionValue(); + } + + public static org.apache.arrow.flight.impl.Flight.SessionOptionValue getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public SessionOptionValue parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface SetSessionOptionsRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.SetSessionOptionsRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + int getSessionOptionsCount(); + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + boolean containsSessionOptions( + java.lang.String key); + /** + * Use {@link #getSessionOptionsMap()} instead. 
+ */ + @java.lang.Deprecated + java.util.Map + getSessionOptions(); + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + java.util.Map + getSessionOptionsMap(); + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrDefault( + java.lang.String key, + /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue defaultValue); + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrThrow( + java.lang.String key); + } + /** + *
+   *
+   * EXPERIMENTAL: A request to set session options for an existing or new (implicit)
+   * server session.
+   *
+   * Sessions are persisted and referenced via a transport-level state management, typically
+   * RFC 6265 HTTP cookies when using an HTTP transport.  The suggested cookie name or state
+   * context key is 'arrow_flight_session_id', although implementations may freely choose their
+   * own name.
+   *
+   * Session creation (if one does not already exist) is implied by this RPC request, however
+   * server implementations may choose to initiate a session that also contains client-provided
+   * session options at any other time, e.g. on authentication, or when any other call is made
+   * and the server wishes to use a session to persist any state (or lack thereof).
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.SetSessionOptionsRequest} + */ + public static final class SetSessionOptionsRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.SetSessionOptionsRequest) + SetSessionOptionsRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use SetSessionOptionsRequest.newBuilder() to construct. + private SetSessionOptionsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private SetSessionOptionsRequest() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new SetSessionOptionsRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsRequest_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 1: + return internalGetSessionOptions(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest.class, org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest.Builder.class); + } + + public static final int SESSION_OPTIONS_FIELD_NUMBER = 1; + private static final class SessionOptionsDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, org.apache.arrow.flight.impl.Flight.SessionOptionValue> defaultEntry = + 
com.google.protobuf.MapEntry + .newDefaultInstance( + org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsRequest_SessionOptionsEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.MESSAGE, + org.apache.arrow.flight.impl.Flight.SessionOptionValue.getDefaultInstance()); + } + @SuppressWarnings("serial") + private com.google.protobuf.MapField< + java.lang.String, org.apache.arrow.flight.impl.Flight.SessionOptionValue> sessionOptions_; + private com.google.protobuf.MapField + internalGetSessionOptions() { + if (sessionOptions_ == null) { + return com.google.protobuf.MapField.emptyMapField( + SessionOptionsDefaultEntryHolder.defaultEntry); + } + return sessionOptions_; + } + public int getSessionOptionsCount() { + return internalGetSessionOptions().getMap().size(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public boolean containsSessionOptions( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + return internalGetSessionOptions().getMap().containsKey(key); + } + /** + * Use {@link #getSessionOptionsMap()} instead. 
+ */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getSessionOptions() { + return getSessionOptionsMap(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public java.util.Map getSessionOptionsMap() { + return internalGetSessionOptions().getMap(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrDefault( + java.lang.String key, + /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue defaultValue) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetSessionOptions().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrThrow( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetSessionOptions().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetSessionOptions(), + SessionOptionsDefaultEntryHolder.defaultEntry, + 1); + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int 
getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (java.util.Map.Entry entry + : internalGetSessionOptions().getMap().entrySet()) { + com.google.protobuf.MapEntry + sessionOptions__ = SessionOptionsDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, sessionOptions__); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest other = (org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest) obj; + + if (!internalGetSessionOptions().equals( + other.internalGetSessionOptions())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (!internalGetSessionOptions().getMap().isEmpty()) { + hash = (37 * hash) + SESSION_OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + internalGetSessionOptions().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static 
org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * EXPERIMENTAL: A request to set session options for an existing or new (implicit)
+     * server session.
+     *
+     * Sessions are persisted and referenced via a transport-level state management, typically
+     * RFC 6265 HTTP cookies when using an HTTP transport.  The suggested cookie name or state
+     * context key is 'arrow_flight_session_id', although implementations may freely choose their
+     * own name.
+     *
+     * Session creation (if one does not already exist) is implied by this RPC request, however
+     * server implementations may choose to initiate a session that also contains client-provided
+     * session options at any other time, e.g. on authentication, or when any other call is made
+     * and the server wishes to use a session to persist any state (or lack thereof).
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.SetSessionOptionsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.SetSessionOptionsRequest) + org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsRequest_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 1: + return internalGetSessionOptions(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 1: + return internalGetMutableSessionOptions(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest.class, org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + internalGetMutableSessionOptions().clear(); + return this; + } + + @java.lang.Override + public 
com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest build() { + org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest buildPartial() { + org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest result = new org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.sessionOptions_ = internalGetSessionOptions(); + result.sessionOptions_.makeImmutable(); + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest other) { + if (other == org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest.getDefaultInstance()) return this; + internalGetMutableSessionOptions().mergeFrom( + other.internalGetSessionOptions()); + bitField0_ |= 0x00000001; + 
this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.protobuf.MapEntry + sessionOptions__ = input.readMessage( + SessionOptionsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + internalGetMutableSessionOptions().getMutableMap().put( + sessionOptions__.getKey(), sessionOptions__.getValue()); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.MapField< + java.lang.String, org.apache.arrow.flight.impl.Flight.SessionOptionValue> sessionOptions_; + private com.google.protobuf.MapField + internalGetSessionOptions() { + if (sessionOptions_ == null) { + return com.google.protobuf.MapField.emptyMapField( + SessionOptionsDefaultEntryHolder.defaultEntry); + } + return sessionOptions_; + } + private com.google.protobuf.MapField + internalGetMutableSessionOptions() { + if (sessionOptions_ == null) { + sessionOptions_ = com.google.protobuf.MapField.newMapField( + SessionOptionsDefaultEntryHolder.defaultEntry); + } + if (!sessionOptions_.isMutable()) { + sessionOptions_ = sessionOptions_.copy(); + } + bitField0_ |= 
0x00000001; + onChanged(); + return sessionOptions_; + } + public int getSessionOptionsCount() { + return internalGetSessionOptions().getMap().size(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public boolean containsSessionOptions( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + return internalGetSessionOptions().getMap().containsKey(key); + } + /** + * Use {@link #getSessionOptionsMap()} instead. + */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getSessionOptions() { + return getSessionOptionsMap(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public java.util.Map getSessionOptionsMap() { + return internalGetSessionOptions().getMap(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrDefault( + java.lang.String key, + /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue defaultValue) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetSessionOptions().getMap(); + return map.containsKey(key) ? 
map.get(key) : defaultValue; + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrThrow( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetSessionOptions().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + public Builder clearSessionOptions() { + bitField0_ = (bitField0_ & ~0x00000001); + internalGetMutableSessionOptions().getMutableMap() + .clear(); + return this; + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + public Builder removeSessionOptions( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + internalGetMutableSessionOptions().getMutableMap() + .remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. 
+ */ + @java.lang.Deprecated + public java.util.Map + getMutableSessionOptions() { + bitField0_ |= 0x00000001; + return internalGetMutableSessionOptions().getMutableMap(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + public Builder putSessionOptions( + java.lang.String key, + org.apache.arrow.flight.impl.Flight.SessionOptionValue value) { + if (key == null) { throw new NullPointerException("map key"); } + if (value == null) { throw new NullPointerException("map value"); } + internalGetMutableSessionOptions().getMutableMap() + .put(key, value); + bitField0_ |= 0x00000001; + return this; + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + public Builder putAllSessionOptions( + java.util.Map values) { + internalGetMutableSessionOptions().getMutableMap() + .putAll(values); + bitField0_ |= 0x00000001; + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.SetSessionOptionsRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.SetSessionOptionsRequest) + private static final org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest(); + } + + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public SetSessionOptionsRequest parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface SetSessionOptionsResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.SetSessionOptionsResult) + com.google.protobuf.MessageOrBuilder { + + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + int getErrorsCount(); + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + boolean containsErrors( + java.lang.String key); + /** + * Use {@link #getErrorsMap()} instead. 
+ */ + @java.lang.Deprecated + java.util.Map + getErrors(); + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + java.util.Map + getErrorsMap(); + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + /* nullable */ +org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error getErrorsOrDefault( + java.lang.String key, + /* nullable */ +org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error defaultValue); + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error getErrorsOrThrow( + java.lang.String key); + } + /** + *
+   *
+   * EXPERIMENTAL: The results (individually) of setting a set of session options.
+   *
+   * Option names should only be present in the response if they were not successfully
+   * set on the server; that is, a response without an Error for a name provided in the
+   * SetSessionOptionsRequest implies that the named option value was set successfully.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.SetSessionOptionsResult} + */ + public static final class SetSessionOptionsResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.SetSessionOptionsResult) + SetSessionOptionsResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use SetSessionOptionsResult.newBuilder() to construct. + private SetSessionOptionsResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private SetSessionOptionsResult() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new SetSessionOptionsResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsResult_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 1: + return internalGetErrors(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.class, org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Builder.class); + } + + /** + * Protobuf enum {@code arrow.flight.protocol.SetSessionOptionsResult.ErrorValue} + */ + public enum ErrorValue + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+       * Protobuf deserialization fallback value: The status is unknown or unrecognized.
+       * Servers should avoid using this value. The request may be retried by the client.
+       * 
+ * + * UNSPECIFIED = 0; + */ + UNSPECIFIED(0), + /** + *
+       * The given session option name is invalid.
+       * 
+ * + * INVALID_NAME = 1; + */ + INVALID_NAME(1), + /** + *
+       * The session option value or type is invalid.
+       * 
+ * + * INVALID_VALUE = 2; + */ + INVALID_VALUE(2), + /** + *
+       * The session option cannot be set.
+       * 
+ * + * ERROR = 3; + */ + ERROR(3), + UNRECOGNIZED(-1), + ; + + /** + *
+       * Protobuf deserialization fallback value: The status is unknown or unrecognized.
+       * Servers should avoid using this value. The request may be retried by the client.
+       * 
+ * + * UNSPECIFIED = 0; + */ + public static final int UNSPECIFIED_VALUE = 0; + /** + *
+       * The given session option name is invalid.
+       * 
+ * + * INVALID_NAME = 1; + */ + public static final int INVALID_NAME_VALUE = 1; + /** + *
+       * The session option value or type is invalid.
+       * 
+ * + * INVALID_VALUE = 2; + */ + public static final int INVALID_VALUE_VALUE = 2; + /** + *
+       * The session option cannot be set.
+       * 
+ * + * ERROR = 3; + */ + public static final int ERROR_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static ErrorValue valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static ErrorValue forNumber(int value) { + switch (value) { + case 0: return UNSPECIFIED; + case 1: return INVALID_NAME; + case 2: return INVALID_VALUE; + case 3: return ERROR; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + ErrorValue> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public ErrorValue findValueByNumber(int number) { + return ErrorValue.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.getDescriptor().getEnumTypes().get(0); + } + + private static final ErrorValue[] VALUES = values(); + + public static 
ErrorValue valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private ErrorValue(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.SetSessionOptionsResult.ErrorValue) + } + + public interface ErrorOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.SetSessionOptionsResult.Error) + com.google.protobuf.MessageOrBuilder { + + /** + * .arrow.flight.protocol.SetSessionOptionsResult.ErrorValue value = 1; + * @return The enum numeric value on the wire for value. + */ + int getValueValue(); + /** + * .arrow.flight.protocol.SetSessionOptionsResult.ErrorValue value = 1; + * @return The value. + */ + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue getValue(); + } + /** + * Protobuf type {@code arrow.flight.protocol.SetSessionOptionsResult.Error} + */ + public static final class Error extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.SetSessionOptionsResult.Error) + ErrorOrBuilder { + private static final long serialVersionUID = 0L; + // Use Error.newBuilder() to construct. 
+ private Error(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Error() { + value_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Error(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsResult_Error_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsResult_Error_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error.class, org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error.Builder.class); + } + + public static final int VALUE_FIELD_NUMBER = 1; + private int value_ = 0; + /** + * .arrow.flight.protocol.SetSessionOptionsResult.ErrorValue value = 1; + * @return The enum numeric value on the wire for value. + */ + @java.lang.Override public int getValueValue() { + return value_; + } + /** + * .arrow.flight.protocol.SetSessionOptionsResult.ErrorValue value = 1; + * @return The value. + */ + @java.lang.Override public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue getValue() { + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue result = org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue.forNumber(value_); + return result == null ? 
org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (value_ != org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue.UNSPECIFIED.getNumber()) { + output.writeEnum(1, value_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (value_ != org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue.UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error other = (org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error) obj; + + if (value_ != other.value_) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + value_; + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } 
+ + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code arrow.flight.protocol.SetSessionOptionsResult.Error} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.SetSessionOptionsResult.Error) + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsResult_Error_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsResult_Error_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error.class, org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + value_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsResult_Error_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error 
getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error build() { + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error buildPartial() { + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error result = new org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.value_ = value_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error other) { + if (other == org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error.getDefaultInstance()) return this; + if (other.value_ != 0) { + setValueValue(other.getValueValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + value_ = input.readEnum(); + bitField0_ |= 0x00000001; + break; + } // case 8 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int value_ = 0; + /** + * .arrow.flight.protocol.SetSessionOptionsResult.ErrorValue value = 1; + * @return The enum numeric value on the wire for value. + */ + @java.lang.Override public int getValueValue() { + return value_; + } + /** + * .arrow.flight.protocol.SetSessionOptionsResult.ErrorValue value = 1; + * @param value The enum numeric value on the wire for value to set. + * @return This builder for chaining. + */ + public Builder setValueValue(int value) { + value_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.SetSessionOptionsResult.ErrorValue value = 1; + * @return The value. + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue getValue() { + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue result = org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue.forNumber(value_); + return result == null ? org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue.UNRECOGNIZED : result; + } + /** + * .arrow.flight.protocol.SetSessionOptionsResult.ErrorValue value = 1; + * @param value The value to set. + * @return This builder for chaining. 
+ */ + public Builder setValue(org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.ErrorValue value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + value_ = value.getNumber(); + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.SetSessionOptionsResult.ErrorValue value = 1; + * @return This builder for chaining. + */ + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000001); + value_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.SetSessionOptionsResult.Error) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.SetSessionOptionsResult.Error) + private static final org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error(); + } + + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Error parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch 
(com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public static final int ERRORS_FIELD_NUMBER = 1; + private static final class ErrorsDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error> defaultEntry = + com.google.protobuf.MapEntry + .newDefaultInstance( + org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsResult_ErrorsEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.MESSAGE, + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error.getDefaultInstance()); + } + @SuppressWarnings("serial") + private com.google.protobuf.MapField< + java.lang.String, org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error> errors_; + private com.google.protobuf.MapField + internalGetErrors() { + if (errors_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ErrorsDefaultEntryHolder.defaultEntry); + } + return errors_; + } + public int getErrorsCount() { + return internalGetErrors().getMap().size(); + } + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + @java.lang.Override + public boolean containsErrors( + java.lang.String key) { + if (key == null) { throw new 
NullPointerException("map key"); } + return internalGetErrors().getMap().containsKey(key); + } + /** + * Use {@link #getErrorsMap()} instead. + */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getErrors() { + return getErrorsMap(); + } + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + @java.lang.Override + public java.util.Map getErrorsMap() { + return internalGetErrors().getMap(); + } + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + @java.lang.Override + public /* nullable */ +org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error getErrorsOrDefault( + java.lang.String key, + /* nullable */ +org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error defaultValue) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetErrors().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error getErrorsOrThrow( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetErrors().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetErrors(), + 
ErrorsDefaultEntryHolder.defaultEntry, + 1); + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (java.util.Map.Entry entry + : internalGetErrors().getMap().entrySet()) { + com.google.protobuf.MapEntry + errors__ = ErrorsDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, errors__); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult other = (org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult) obj; + + if (!internalGetErrors().equals( + other.internalGetErrors())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (!internalGetErrors().getMap().isEmpty()) { + hash = (37 * hash) + ERRORS_FIELD_NUMBER; + hash = (53 * hash) + internalGetErrors().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseFrom( + java.nio.ByteBuffer data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, 
input); + } + + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * EXPERIMENTAL: The results (individually) of setting a set of session options.
+     *
+     * Option names should only be present in the response if they were not successfully
+     * set on the server; that is, a response without an Error for a name provided in the
+     * SetSessionOptionsRequest implies that the named option value was set successfully.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.SetSessionOptionsResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.SetSessionOptionsResult) + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsResult_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 1: + return internalGetErrors(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 1: + return internalGetMutableErrors(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.class, org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + internalGetMutableErrors().clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + 
getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_SetSessionOptionsResult_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult build() { + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult buildPartial() { + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult result = new org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.errors_ = internalGetErrors(); + result.errors_.makeImmutable(); + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult other) { + if (other == org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.getDefaultInstance()) return this; + internalGetMutableErrors().mergeFrom( + other.internalGetErrors()); + bitField0_ |= 0x00000001; + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + 
@java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.protobuf.MapEntry + errors__ = input.readMessage( + ErrorsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + internalGetMutableErrors().getMutableMap().put( + errors__.getKey(), errors__.getValue()); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.MapField< + java.lang.String, org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error> errors_; + private com.google.protobuf.MapField + internalGetErrors() { + if (errors_ == null) { + return com.google.protobuf.MapField.emptyMapField( + ErrorsDefaultEntryHolder.defaultEntry); + } + return errors_; + } + private com.google.protobuf.MapField + internalGetMutableErrors() { + if (errors_ == null) { + errors_ = com.google.protobuf.MapField.newMapField( + ErrorsDefaultEntryHolder.defaultEntry); + } + if (!errors_.isMutable()) { + errors_ = errors_.copy(); + } + bitField0_ |= 0x00000001; + onChanged(); + return errors_; + } + public int getErrorsCount() { + return internalGetErrors().getMap().size(); + } + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; 
+ */ + @java.lang.Override + public boolean containsErrors( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + return internalGetErrors().getMap().containsKey(key); + } + /** + * Use {@link #getErrorsMap()} instead. + */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getErrors() { + return getErrorsMap(); + } + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + @java.lang.Override + public java.util.Map getErrorsMap() { + return internalGetErrors().getMap(); + } + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + @java.lang.Override + public /* nullable */ +org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error getErrorsOrDefault( + java.lang.String key, + /* nullable */ +org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error defaultValue) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetErrors().getMap(); + return map.containsKey(key) ? 
map.get(key) : defaultValue; + } + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error getErrorsOrThrow( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetErrors().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + public Builder clearErrors() { + bitField0_ = (bitField0_ & ~0x00000001); + internalGetMutableErrors().getMutableMap() + .clear(); + return this; + } + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + public Builder removeErrors( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + internalGetMutableErrors().getMutableMap() + .remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. + */ + @java.lang.Deprecated + public java.util.Map + getMutableErrors() { + bitField0_ |= 0x00000001; + return internalGetMutableErrors().getMutableMap(); + } + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + public Builder putErrors( + java.lang.String key, + org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult.Error value) { + if (key == null) { throw new NullPointerException("map key"); } + if (value == null) { throw new NullPointerException("map value"); } + internalGetMutableErrors().getMutableMap() + .put(key, value); + bitField0_ |= 0x00000001; + return this; + } + /** + * map<string, .arrow.flight.protocol.SetSessionOptionsResult.Error> errors = 1; + */ + public Builder putAllErrors( + java.util.Map values) { + internalGetMutableErrors().getMutableMap() + .putAll(values); + bitField0_ |= 0x00000001; + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet 
unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.SetSessionOptionsResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.SetSessionOptionsResult) + private static final org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult(); + } + + public static org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public SetSessionOptionsResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SetSessionOptionsResult getDefaultInstanceForType() { + 
return DEFAULT_INSTANCE; + } + + } + + public interface GetSessionOptionsRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.GetSessionOptionsRequest) + com.google.protobuf.MessageOrBuilder { + } + /** + *
+   *
+   * EXPERIMENTAL: A request to access the session options for the current server session.
+   *
+   * The existing session is referenced via a cookie header or similar (see
+   * SetSessionOptionsRequest above); it is an error to make this request with a missing,
+   * invalid, or expired session cookie header or other implementation-defined session
+   * reference token.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.GetSessionOptionsRequest} + */ + public static final class GetSessionOptionsRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.GetSessionOptionsRequest) + GetSessionOptionsRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetSessionOptionsRequest.newBuilder() to construct. + private GetSessionOptionsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GetSessionOptionsRequest() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new GetSessionOptionsRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_GetSessionOptionsRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_GetSessionOptionsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest.class, org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest.Builder.class); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + 
+ size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest other = (org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest) obj; + + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseFrom(byte[] 
data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * EXPERIMENTAL: A request to access the session options for the current server session.
+     *
+     * The existing session is referenced via a cookie header or similar (see
+     * SetSessionOptionsRequest above); it is an error to make this request with a missing,
+     * invalid, or expired session cookie header or other implementation-defined session
+     * reference token.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.GetSessionOptionsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.GetSessionOptionsRequest) + org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_GetSessionOptionsRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_GetSessionOptionsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest.class, org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_GetSessionOptionsRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest build() { + org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest result = buildPartial(); + if (!result.isInitialized()) 
{ + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest buildPartial() { + org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest result = new org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest(this); + onBuilt(); + return result; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest other) { + if (other == org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + 
} + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.GetSessionOptionsRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.GetSessionOptionsRequest) + private static final org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest(); + } + + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetSessionOptionsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.GetSessionOptionsRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface 
GetSessionOptionsResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.GetSessionOptionsResult) + com.google.protobuf.MessageOrBuilder { + + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + int getSessionOptionsCount(); + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + boolean containsSessionOptions( + java.lang.String key); + /** + * Use {@link #getSessionOptionsMap()} instead. + */ + @java.lang.Deprecated + java.util.Map + getSessionOptions(); + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + java.util.Map + getSessionOptionsMap(); + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrDefault( + java.lang.String key, + /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue defaultValue); + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrThrow( + java.lang.String key); + } + /** + *
+   *
+   * EXPERIMENTAL: The result containing the current server session options.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.GetSessionOptionsResult} + */ + public static final class GetSessionOptionsResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.GetSessionOptionsResult) + GetSessionOptionsResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetSessionOptionsResult.newBuilder() to construct. + private GetSessionOptionsResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GetSessionOptionsResult() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new GetSessionOptionsResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_GetSessionOptionsResult_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + @java.lang.Override + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 1: + return internalGetSessionOptions(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_GetSessionOptionsResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult.class, org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult.Builder.class); + } + + public static final int SESSION_OPTIONS_FIELD_NUMBER = 1; + private static final class SessionOptionsDefaultEntryHolder { + static final com.google.protobuf.MapEntry< + java.lang.String, org.apache.arrow.flight.impl.Flight.SessionOptionValue> defaultEntry = + 
com.google.protobuf.MapEntry + .newDefaultInstance( + org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_GetSessionOptionsResult_SessionOptionsEntry_descriptor, + com.google.protobuf.WireFormat.FieldType.STRING, + "", + com.google.protobuf.WireFormat.FieldType.MESSAGE, + org.apache.arrow.flight.impl.Flight.SessionOptionValue.getDefaultInstance()); + } + @SuppressWarnings("serial") + private com.google.protobuf.MapField< + java.lang.String, org.apache.arrow.flight.impl.Flight.SessionOptionValue> sessionOptions_; + private com.google.protobuf.MapField + internalGetSessionOptions() { + if (sessionOptions_ == null) { + return com.google.protobuf.MapField.emptyMapField( + SessionOptionsDefaultEntryHolder.defaultEntry); + } + return sessionOptions_; + } + public int getSessionOptionsCount() { + return internalGetSessionOptions().getMap().size(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public boolean containsSessionOptions( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + return internalGetSessionOptions().getMap().containsKey(key); + } + /** + * Use {@link #getSessionOptionsMap()} instead. 
+ */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getSessionOptions() { + return getSessionOptionsMap(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public java.util.Map getSessionOptionsMap() { + return internalGetSessionOptions().getMap(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrDefault( + java.lang.String key, + /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue defaultValue) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetSessionOptions().getMap(); + return map.containsKey(key) ? map.get(key) : defaultValue; + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrThrow( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetSessionOptions().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + com.google.protobuf.GeneratedMessageV3 + .serializeStringMapTo( + output, + internalGetSessionOptions(), + SessionOptionsDefaultEntryHolder.defaultEntry, + 1); + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int 
getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (java.util.Map.Entry entry + : internalGetSessionOptions().getMap().entrySet()) { + com.google.protobuf.MapEntry + sessionOptions__ = SessionOptionsDefaultEntryHolder.defaultEntry.newBuilderForType() + .setKey(entry.getKey()) + .setValue(entry.getValue()) + .build(); + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, sessionOptions__); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult other = (org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult) obj; + + if (!internalGetSessionOptions().equals( + other.internalGetSessionOptions())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (!internalGetSessionOptions().getMap().isEmpty()) { + hash = (37 * hash) + SESSION_OPTIONS_FIELD_NUMBER; + hash = (53 * hash) + internalGetSessionOptions().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static 
org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * EXPERIMENTAL: The result containing the current server session options.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.GetSessionOptionsResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.GetSessionOptionsResult) + org.apache.arrow.flight.impl.Flight.GetSessionOptionsResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_GetSessionOptionsResult_descriptor; + } + + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMapField( + int number) { + switch (number) { + case 1: + return internalGetSessionOptions(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @SuppressWarnings({"rawtypes"}) + protected com.google.protobuf.MapField internalGetMutableMapField( + int number) { + switch (number) { + case 1: + return internalGetMutableSessionOptions(); + default: + throw new RuntimeException( + "Invalid map field number: " + number); + } + } + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_GetSessionOptionsResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult.class, org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + internalGetMutableSessionOptions().clear(); + return this; + } + + @java.lang.Override + public 
com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_GetSessionOptionsResult_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult build() { + org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult buildPartial() { + org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult result = new org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.sessionOptions_ = internalGetSessionOptions(); + result.sessionOptions_.makeImmutable(); + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult other) { + if (other == org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult.getDefaultInstance()) return this; + internalGetMutableSessionOptions().mergeFrom( + other.internalGetSessionOptions()); + bitField0_ |= 0x00000001; + 
this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + com.google.protobuf.MapEntry + sessionOptions__ = input.readMessage( + SessionOptionsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); + internalGetMutableSessionOptions().getMutableMap().put( + sessionOptions__.getKey(), sessionOptions__.getValue()); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.MapField< + java.lang.String, org.apache.arrow.flight.impl.Flight.SessionOptionValue> sessionOptions_; + private com.google.protobuf.MapField + internalGetSessionOptions() { + if (sessionOptions_ == null) { + return com.google.protobuf.MapField.emptyMapField( + SessionOptionsDefaultEntryHolder.defaultEntry); + } + return sessionOptions_; + } + private com.google.protobuf.MapField + internalGetMutableSessionOptions() { + if (sessionOptions_ == null) { + sessionOptions_ = com.google.protobuf.MapField.newMapField( + SessionOptionsDefaultEntryHolder.defaultEntry); + } + if (!sessionOptions_.isMutable()) { + sessionOptions_ = sessionOptions_.copy(); + } + bitField0_ |= 
0x00000001; + onChanged(); + return sessionOptions_; + } + public int getSessionOptionsCount() { + return internalGetSessionOptions().getMap().size(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public boolean containsSessionOptions( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + return internalGetSessionOptions().getMap().containsKey(key); + } + /** + * Use {@link #getSessionOptionsMap()} instead. + */ + @java.lang.Override + @java.lang.Deprecated + public java.util.Map getSessionOptions() { + return getSessionOptionsMap(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public java.util.Map getSessionOptionsMap() { + return internalGetSessionOptions().getMap(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrDefault( + java.lang.String key, + /* nullable */ +org.apache.arrow.flight.impl.Flight.SessionOptionValue defaultValue) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetSessionOptions().getMap(); + return map.containsKey(key) ? 
map.get(key) : defaultValue; + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.SessionOptionValue getSessionOptionsOrThrow( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + java.util.Map map = + internalGetSessionOptions().getMap(); + if (!map.containsKey(key)) { + throw new java.lang.IllegalArgumentException(); + } + return map.get(key); + } + public Builder clearSessionOptions() { + bitField0_ = (bitField0_ & ~0x00000001); + internalGetMutableSessionOptions().getMutableMap() + .clear(); + return this; + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + public Builder removeSessionOptions( + java.lang.String key) { + if (key == null) { throw new NullPointerException("map key"); } + internalGetMutableSessionOptions().getMutableMap() + .remove(key); + return this; + } + /** + * Use alternate mutation accessors instead. 
+ */ + @java.lang.Deprecated + public java.util.Map + getMutableSessionOptions() { + bitField0_ |= 0x00000001; + return internalGetMutableSessionOptions().getMutableMap(); + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + public Builder putSessionOptions( + java.lang.String key, + org.apache.arrow.flight.impl.Flight.SessionOptionValue value) { + if (key == null) { throw new NullPointerException("map key"); } + if (value == null) { throw new NullPointerException("map value"); } + internalGetMutableSessionOptions().getMutableMap() + .put(key, value); + bitField0_ |= 0x00000001; + return this; + } + /** + * map<string, .arrow.flight.protocol.SessionOptionValue> session_options = 1; + */ + public Builder putAllSessionOptions( + java.util.Map values) { + internalGetMutableSessionOptions().getMutableMap() + .putAll(values); + bitField0_ |= 0x00000001; + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.GetSessionOptionsResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.GetSessionOptionsResult) + private static final org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult(); + } + + public static org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetSessionOptionsResult parsePartialFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.GetSessionOptionsResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CloseSessionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.CloseSessionRequest) + com.google.protobuf.MessageOrBuilder { + } + /** + *
+   *
+   * Request message for the "Close Session" action.
+   *
+   * The exiting session is referenced via a cookie header.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.CloseSessionRequest} + */ + public static final class CloseSessionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.CloseSessionRequest) + CloseSessionRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use CloseSessionRequest.newBuilder() to construct. + private CloseSessionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CloseSessionRequest() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CloseSessionRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CloseSessionRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CloseSessionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.CloseSessionRequest.class, org.apache.arrow.flight.impl.Flight.CloseSessionRequest.Builder.class); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + size += 
getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.CloseSessionRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.CloseSessionRequest other = (org.apache.arrow.flight.impl.Flight.CloseSessionRequest) obj; + + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.CloseSessionRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Request message for the "Close Session" action.
+     *
+     * The exiting session is referenced via a cookie header.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.CloseSessionRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.CloseSessionRequest) + org.apache.arrow.flight.impl.Flight.CloseSessionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CloseSessionRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CloseSessionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.CloseSessionRequest.class, org.apache.arrow.flight.impl.Flight.CloseSessionRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.CloseSessionRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CloseSessionRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CloseSessionRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.CloseSessionRequest.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CloseSessionRequest build() { + org.apache.arrow.flight.impl.Flight.CloseSessionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return 
result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CloseSessionRequest buildPartial() { + org.apache.arrow.flight.impl.Flight.CloseSessionRequest result = new org.apache.arrow.flight.impl.Flight.CloseSessionRequest(this); + onBuilt(); + return result; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.CloseSessionRequest) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.CloseSessionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.CloseSessionRequest other) { + if (other == org.apache.arrow.flight.impl.Flight.CloseSessionRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final 
com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.CloseSessionRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.CloseSessionRequest) + private static final org.apache.arrow.flight.impl.Flight.CloseSessionRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.CloseSessionRequest(); + } + + public static org.apache.arrow.flight.impl.Flight.CloseSessionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CloseSessionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CloseSessionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CloseSessionResultOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:arrow.flight.protocol.CloseSessionResult) + com.google.protobuf.MessageOrBuilder { + + /** + * .arrow.flight.protocol.CloseSessionResult.Status status = 1; + * @return The enum numeric value on the wire for status. + */ + int getStatusValue(); + /** + * .arrow.flight.protocol.CloseSessionResult.Status status = 1; + * @return The status. + */ + org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status getStatus(); + } + /** + *
+   *
+   * The result of closing a session.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.CloseSessionResult} + */ + public static final class CloseSessionResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.CloseSessionResult) + CloseSessionResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use CloseSessionResult.newBuilder() to construct. + private CloseSessionResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CloseSessionResult() { + status_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CloseSessionResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CloseSessionResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CloseSessionResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.CloseSessionResult.class, org.apache.arrow.flight.impl.Flight.CloseSessionResult.Builder.class); + } + + /** + * Protobuf enum {@code arrow.flight.protocol.CloseSessionResult.Status} + */ + public enum Status + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+       * Protobuf deserialization fallback value: The session close status is unknown or
+       * not recognized. Servers should avoid using this value (send a NOT_FOUND error if
+       * the requested session is not known or expired). Clients can retry the request.
+       * 
+ * + * UNSPECIFIED = 0; + */ + UNSPECIFIED(0), + /** + *
+       * The session close request is complete. Subsequent requests with
+       * the same session produce a NOT_FOUND error.
+       * 
+ * + * CLOSED = 1; + */ + CLOSED(1), + /** + *
+       * The session close request is in progress. The client may retry
+       * the close request.
+       * 
+ * + * CLOSING = 2; + */ + CLOSING(2), + /** + *
+       * The session is not closeable. The client should not retry the
+       * close request.
+       * 
+ * + * NOT_CLOSEABLE = 3; + */ + NOT_CLOSEABLE(3), + UNRECOGNIZED(-1), + ; + + /** + *
+       * Protobuf deserialization fallback value: The session close status is unknown or
+       * not recognized. Servers should avoid using this value (send a NOT_FOUND error if
+       * the requested session is not known or expired). Clients can retry the request.
+       * 
+ * + * UNSPECIFIED = 0; + */ + public static final int UNSPECIFIED_VALUE = 0; + /** + *
+       * The session close request is complete. Subsequent requests with
+       * the same session produce a NOT_FOUND error.
+       * 
+ * + * CLOSED = 1; + */ + public static final int CLOSED_VALUE = 1; + /** + *
+       * The session close request is in progress. The client may retry
+       * the close request.
+       * 
+ * + * CLOSING = 2; + */ + public static final int CLOSING_VALUE = 2; + /** + *
+       * The session is not closeable. The client should not retry the
+       * close request.
+       * 
+ * + * NOT_CLOSEABLE = 3; + */ + public static final int NOT_CLOSEABLE_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static Status valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static Status forNumber(int value) { + switch (value) { + case 0: return UNSPECIFIED; + case 1: return CLOSED; + case 2: return CLOSING; + case 3: return NOT_CLOSEABLE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + Status> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Status findValueByNumber(int number) { + return Status.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.CloseSessionResult.getDescriptor().getEnumTypes().get(0); + } + + private static final Status[] VALUES = values(); + + public static Status valueOf( + 
com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private Status(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.CloseSessionResult.Status) + } + + public static final int STATUS_FIELD_NUMBER = 1; + private int status_ = 0; + /** + * .arrow.flight.protocol.CloseSessionResult.Status status = 1; + * @return The enum numeric value on the wire for status. + */ + @java.lang.Override public int getStatusValue() { + return status_; + } + /** + * .arrow.flight.protocol.CloseSessionResult.Status status = 1; + * @return The status. + */ + @java.lang.Override public org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status getStatus() { + org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status result = org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status.forNumber(status_); + return result == null ? 
org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (status_ != org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status.UNSPECIFIED.getNumber()) { + output.writeEnum(1, status_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (status_ != org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status.UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, status_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.impl.Flight.CloseSessionResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.impl.Flight.CloseSessionResult other = (org.apache.arrow.flight.impl.Flight.CloseSessionResult) obj; + + if (status_ != other.status_) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + STATUS_FIELD_NUMBER; + hash = (53 * hash) + status_; + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
org.apache.arrow.flight.impl.Flight.CloseSessionResult parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); + } + + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.impl.Flight.CloseSessionResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * The result of closing a session.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.CloseSessionResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.CloseSessionResult) + org.apache.arrow.flight.impl.Flight.CloseSessionResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CloseSessionResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CloseSessionResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.impl.Flight.CloseSessionResult.class, org.apache.arrow.flight.impl.Flight.CloseSessionResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.impl.Flight.CloseSessionResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + status_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.impl.Flight.internal_static_arrow_flight_protocol_CloseSessionResult_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CloseSessionResult getDefaultInstanceForType() { + return org.apache.arrow.flight.impl.Flight.CloseSessionResult.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CloseSessionResult build() { + org.apache.arrow.flight.impl.Flight.CloseSessionResult result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CloseSessionResult buildPartial() { + org.apache.arrow.flight.impl.Flight.CloseSessionResult result = new org.apache.arrow.flight.impl.Flight.CloseSessionResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.impl.Flight.CloseSessionResult result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.status_ = status_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.impl.Flight.CloseSessionResult) { + return mergeFrom((org.apache.arrow.flight.impl.Flight.CloseSessionResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.impl.Flight.CloseSessionResult other) { + if (other == org.apache.arrow.flight.impl.Flight.CloseSessionResult.getDefaultInstance()) return this; + if (other.status_ != 0) { + setStatusValue(other.getStatusValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + status_ = input.readEnum(); + bitField0_ |= 0x00000001; + break; + } // case 8 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // 
default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int status_ = 0; + /** + * .arrow.flight.protocol.CloseSessionResult.Status status = 1; + * @return The enum numeric value on the wire for status. + */ + @java.lang.Override public int getStatusValue() { + return status_; + } + /** + * .arrow.flight.protocol.CloseSessionResult.Status status = 1; + * @param value The enum numeric value on the wire for status to set. + * @return This builder for chaining. + */ + public Builder setStatusValue(int value) { + status_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.CloseSessionResult.Status status = 1; + * @return The status. + */ + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status getStatus() { + org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status result = org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status.forNumber(status_); + return result == null ? org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status.UNRECOGNIZED : result; + } + /** + * .arrow.flight.protocol.CloseSessionResult.Status status = 1; + * @param value The status to set. + * @return This builder for chaining. + */ + public Builder setStatus(org.apache.arrow.flight.impl.Flight.CloseSessionResult.Status value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + status_ = value.getNumber(); + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.CloseSessionResult.Status status = 1; + * @return This builder for chaining. 
+ */ + public Builder clearStatus() { + bitField0_ = (bitField0_ & ~0x00000001); + status_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.CloseSessionResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.CloseSessionResult) + private static final org.apache.arrow.flight.impl.Flight.CloseSessionResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.impl.Flight.CloseSessionResult(); + } + + public static org.apache.arrow.flight.impl.Flight.CloseSessionResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CloseSessionResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + 
@java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.impl.Flight.CloseSessionResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_HandshakeRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_HandshakeRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_HandshakeResponse_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_HandshakeResponse_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_BasicAuth_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_BasicAuth_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_Empty_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_Empty_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_ActionType_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_ActionType_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_Criteria_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_Criteria_fieldAccessorTable; + private static final 
com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_Action_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_Action_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_CancelFlightInfoRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_CancelFlightInfoRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_RenewFlightEndpointRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_RenewFlightEndpointRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_Result_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_Result_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_CancelFlightInfoResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_CancelFlightInfoResult_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_SchemaResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_SchemaResult_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_FlightDescriptor_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internal_static_arrow_flight_protocol_FlightDescriptor_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_FlightInfo_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_FlightInfo_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_PollInfo_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_PollInfo_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_FlightEndpoint_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_FlightEndpoint_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_Location_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_Location_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_Ticket_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_Ticket_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_FlightData_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_FlightData_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_PutResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internal_static_arrow_flight_protocol_PutResult_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_SessionOptionValue_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_SessionOptionValue_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_SessionOptionValue_StringListValue_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_SessionOptionValue_StringListValue_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_SetSessionOptionsRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_SetSessionOptionsRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_SetSessionOptionsRequest_SessionOptionsEntry_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_SetSessionOptionsRequest_SessionOptionsEntry_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_SetSessionOptionsResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_SetSessionOptionsResult_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_SetSessionOptionsResult_Error_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_SetSessionOptionsResult_Error_fieldAccessorTable; + private 
static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_SetSessionOptionsResult_ErrorsEntry_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_SetSessionOptionsResult_ErrorsEntry_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_GetSessionOptionsRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_GetSessionOptionsRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_GetSessionOptionsResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_GetSessionOptionsResult_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_GetSessionOptionsResult_SessionOptionsEntry_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_GetSessionOptionsResult_SessionOptionsEntry_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_CloseSessionRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_CloseSessionRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_CloseSessionResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_CloseSessionResult_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + 
} + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\014Flight.proto\022\025arrow.flight.protocol\032\037g" + + "oogle/protobuf/timestamp.proto\"=\n\020Handsh" + + "akeRequest\022\030\n\020protocol_version\030\001 \001(\004\022\017\n\007" + + "payload\030\002 \001(\014\">\n\021HandshakeResponse\022\030\n\020pr" + + "otocol_version\030\001 \001(\004\022\017\n\007payload\030\002 \001(\014\"/\n" + + "\tBasicAuth\022\020\n\010username\030\002 \001(\t\022\020\n\010password" + + "\030\003 \001(\t\"\007\n\005Empty\"/\n\nActionType\022\014\n\004type\030\001 " + + "\001(\t\022\023\n\013description\030\002 \001(\t\"\036\n\010Criteria\022\022\n\n" + + "expression\030\001 \001(\014\"$\n\006Action\022\014\n\004type\030\001 \001(\t" + + "\022\014\n\004body\030\002 \001(\014\"J\n\027CancelFlightInfoReques" + + "t\022/\n\004info\030\001 \001(\0132!.arrow.flight.protocol." + + "FlightInfo\"U\n\032RenewFlightEndpointRequest" + + "\0227\n\010endpoint\030\001 \001(\0132%.arrow.flight.protoc" + + "ol.FlightEndpoint\"\026\n\006Result\022\014\n\004body\030\001 \001(" + + "\014\"M\n\026CancelFlightInfoResult\0223\n\006status\030\001 " + + "\001(\0162#.arrow.flight.protocol.CancelStatus" + + "\"\036\n\014SchemaResult\022\016\n\006schema\030\001 \001(\014\"\245\001\n\020Fli" + + "ghtDescriptor\022D\n\004type\030\001 \001(\01626.arrow.flig" + + "ht.protocol.FlightDescriptor.DescriptorT" + + "ype\022\013\n\003cmd\030\002 \001(\014\022\014\n\004path\030\003 \003(\t\"0\n\016Descri" + + "ptorType\022\013\n\007UNKNOWN\020\000\022\010\n\004PATH\020\001\022\007\n\003CMD\020\002" + + "\"\354\001\n\nFlightInfo\022\016\n\006schema\030\001 \001(\014\022B\n\021fligh" + + "t_descriptor\030\002 \001(\0132\'.arrow.flight.protoc" + + "ol.FlightDescriptor\0227\n\010endpoint\030\003 \003(\0132%." 
+ + "arrow.flight.protocol.FlightEndpoint\022\025\n\r" + + "total_records\030\004 \001(\003\022\023\n\013total_bytes\030\005 \001(\003" + + "\022\017\n\007ordered\030\006 \001(\010\022\024\n\014app_metadata\030\007 \001(\014\"" + + "\330\001\n\010PollInfo\022/\n\004info\030\001 \001(\0132!.arrow.fligh" + + "t.protocol.FlightInfo\022B\n\021flight_descript" + + "or\030\002 \001(\0132\'.arrow.flight.protocol.FlightD" + + "escriptor\022\025\n\010progress\030\003 \001(\001H\000\210\001\001\0223\n\017expi" + + "ration_time\030\004 \001(\0132\032.google.protobuf.Time" + + "stampB\013\n\t_progress\"\275\001\n\016FlightEndpoint\022-\n" + + "\006ticket\030\001 \001(\0132\035.arrow.flight.protocol.Ti" + + "cket\0221\n\010location\030\002 \003(\0132\037.arrow.flight.pr" + + "otocol.Location\0223\n\017expiration_time\030\003 \001(\013" + + "2\032.google.protobuf.Timestamp\022\024\n\014app_meta" + + "data\030\004 \001(\014\"\027\n\010Location\022\013\n\003uri\030\001 \001(\t\"\030\n\006T" + + "icket\022\016\n\006ticket\030\001 \001(\014\"\217\001\n\nFlightData\022B\n\021" + + "flight_descriptor\030\001 \001(\0132\'.arrow.flight.p" + + "rotocol.FlightDescriptor\022\023\n\013data_header\030" + + "\002 \001(\014\022\024\n\014app_metadata\030\003 \001(\014\022\022\n\tdata_body" + + "\030\350\007 \001(\014\"!\n\tPutResult\022\024\n\014app_metadata\030\001 \001" + + "(\014\"\374\001\n\022SessionOptionValue\022\026\n\014string_valu" + + "e\030\001 \001(\tH\000\022\024\n\nbool_value\030\002 \001(\010H\000\022\025\n\013int64" + + "_value\030\003 \001(\020H\000\022\026\n\014double_value\030\004 \001(\001H\000\022V" + + "\n\021string_list_value\030\005 \001(\01329.arrow.flight" + + ".protocol.SessionOptionValue.StringListV" + + "alueH\000\032!\n\017StringListValue\022\016\n\006values\030\001 \003(" + + "\tB\016\n\014option_value\"\332\001\n\030SetSessionOptionsR" + + "equest\022\\\n\017session_options\030\001 \003(\0132C.arrow." 
+ + "flight.protocol.SetSessionOptionsRequest" + + ".SessionOptionsEntry\032`\n\023SessionOptionsEn" + + "try\022\013\n\003key\030\001 \001(\t\0228\n\005value\030\002 \001(\0132).arrow." + + "flight.protocol.SessionOptionValue:\0028\001\"\354" + + "\002\n\027SetSessionOptionsResult\022J\n\006errors\030\001 \003" + + "(\0132:.arrow.flight.protocol.SetSessionOpt" + + "ionsResult.ErrorsEntry\032Q\n\005Error\022H\n\005value" + + "\030\001 \001(\01629.arrow.flight.protocol.SetSessio" + + "nOptionsResult.ErrorValue\032c\n\013ErrorsEntry" + + "\022\013\n\003key\030\001 \001(\t\022C\n\005value\030\002 \001(\01324.arrow.fli" + + "ght.protocol.SetSessionOptionsResult.Err" + + "or:\0028\001\"M\n\nErrorValue\022\017\n\013UNSPECIFIED\020\000\022\020\n" + + "\014INVALID_NAME\020\001\022\021\n\rINVALID_VALUE\020\002\022\t\n\005ER" + + "ROR\020\003\"\032\n\030GetSessionOptionsRequest\"\330\001\n\027Ge" + + "tSessionOptionsResult\022[\n\017session_options" + + "\030\001 \003(\0132B.arrow.flight.protocol.GetSessio" + + "nOptionsResult.SessionOptionsEntry\032`\n\023Se" + + "ssionOptionsEntry\022\013\n\003key\030\001 \001(\t\0228\n\005value\030" + + "\002 \001(\0132).arrow.flight.protocol.SessionOpt" + + "ionValue:\0028\001\"\025\n\023CloseSessionRequest\"\235\001\n\022" + + "CloseSessionResult\022@\n\006status\030\001 \001(\01620.arr" + + "ow.flight.protocol.CloseSessionResult.St" + + "atus\"E\n\006Status\022\017\n\013UNSPECIFIED\020\000\022\n\n\006CLOSE" + + "D\020\001\022\013\n\007CLOSING\020\002\022\021\n\rNOT_CLOSEABLE\020\003*\213\001\n\014" + + "CancelStatus\022\035\n\031CANCEL_STATUS_UNSPECIFIE" + + "D\020\000\022\033\n\027CANCEL_STATUS_CANCELLED\020\001\022\034\n\030CANC" + + "EL_STATUS_CANCELLING\020\002\022!\n\035CANCEL_STATUS_" + + "NOT_CANCELLABLE\020\0032\205\007\n\rFlightService\022d\n\tH" + + "andshake\022\'.arrow.flight.protocol.Handsha" + + "keRequest\032(.arrow.flight.protocol.Handsh" + + "akeResponse\"\000(\0010\001\022U\n\013ListFlights\022\037.arrow" + + 
".flight.protocol.Criteria\032!.arrow.flight" + + ".protocol.FlightInfo\"\0000\001\022]\n\rGetFlightInf" + + "o\022\'.arrow.flight.protocol.FlightDescript" + + "or\032!.arrow.flight.protocol.FlightInfo\"\000\022" + + "\\\n\016PollFlightInfo\022\'.arrow.flight.protoco" + + "l.FlightDescriptor\032\037.arrow.flight.protoc" + + "ol.PollInfo\"\000\022[\n\tGetSchema\022\'.arrow.fligh" + + "t.protocol.FlightDescriptor\032#.arrow.flig" + + "ht.protocol.SchemaResult\"\000\022M\n\005DoGet\022\035.ar" + + "row.flight.protocol.Ticket\032!.arrow.fligh" + + "t.protocol.FlightData\"\0000\001\022R\n\005DoPut\022!.arr" + + "ow.flight.protocol.FlightData\032 .arrow.fl" + + "ight.protocol.PutResult\"\000(\0010\001\022X\n\nDoExcha" + + "nge\022!.arrow.flight.protocol.FlightData\032!" + + ".arrow.flight.protocol.FlightData\"\000(\0010\001\022" + + "L\n\010DoAction\022\035.arrow.flight.protocol.Acti" + + "on\032\035.arrow.flight.protocol.Result\"\0000\001\022R\n" + + "\013ListActions\022\034.arrow.flight.protocol.Emp" + + "ty\032!.arrow.flight.protocol.ActionType\"\0000" + + "\001Bq\n\034org.apache.arrow.flight.implZ2githu" + + "b.com/apache/arrow/go/arrow/flight/gen/f" + + "light\252\002\034Apache.Arrow.Flight.Protocolb\006pr" + + "oto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + com.google.protobuf.TimestampProto.getDescriptor(), + }); + internal_static_arrow_flight_protocol_HandshakeRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_arrow_flight_protocol_HandshakeRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_HandshakeRequest_descriptor, + new java.lang.String[] { "ProtocolVersion", "Payload", }); + internal_static_arrow_flight_protocol_HandshakeResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + 
internal_static_arrow_flight_protocol_HandshakeResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_HandshakeResponse_descriptor, + new java.lang.String[] { "ProtocolVersion", "Payload", }); + internal_static_arrow_flight_protocol_BasicAuth_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_arrow_flight_protocol_BasicAuth_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_BasicAuth_descriptor, + new java.lang.String[] { "Username", "Password", }); + internal_static_arrow_flight_protocol_Empty_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_arrow_flight_protocol_Empty_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_Empty_descriptor, + new java.lang.String[] { }); + internal_static_arrow_flight_protocol_ActionType_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_arrow_flight_protocol_ActionType_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_ActionType_descriptor, + new java.lang.String[] { "Type", "Description", }); + internal_static_arrow_flight_protocol_Criteria_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_arrow_flight_protocol_Criteria_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_Criteria_descriptor, + new java.lang.String[] { "Expression", }); + internal_static_arrow_flight_protocol_Action_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_arrow_flight_protocol_Action_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_Action_descriptor, + new java.lang.String[] { "Type", 
"Body", }); + internal_static_arrow_flight_protocol_CancelFlightInfoRequest_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_arrow_flight_protocol_CancelFlightInfoRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_CancelFlightInfoRequest_descriptor, + new java.lang.String[] { "Info", }); + internal_static_arrow_flight_protocol_RenewFlightEndpointRequest_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_arrow_flight_protocol_RenewFlightEndpointRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_RenewFlightEndpointRequest_descriptor, + new java.lang.String[] { "Endpoint", }); + internal_static_arrow_flight_protocol_Result_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_arrow_flight_protocol_Result_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_Result_descriptor, + new java.lang.String[] { "Body", }); + internal_static_arrow_flight_protocol_CancelFlightInfoResult_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_arrow_flight_protocol_CancelFlightInfoResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_CancelFlightInfoResult_descriptor, + new java.lang.String[] { "Status", }); + internal_static_arrow_flight_protocol_SchemaResult_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_arrow_flight_protocol_SchemaResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_SchemaResult_descriptor, + new java.lang.String[] { "Schema", }); + internal_static_arrow_flight_protocol_FlightDescriptor_descriptor = + getDescriptor().getMessageTypes().get(12); + 
internal_static_arrow_flight_protocol_FlightDescriptor_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_FlightDescriptor_descriptor, + new java.lang.String[] { "Type", "Cmd", "Path", }); + internal_static_arrow_flight_protocol_FlightInfo_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_arrow_flight_protocol_FlightInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_FlightInfo_descriptor, + new java.lang.String[] { "Schema", "FlightDescriptor", "Endpoint", "TotalRecords", "TotalBytes", "Ordered", "AppMetadata", }); + internal_static_arrow_flight_protocol_PollInfo_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_arrow_flight_protocol_PollInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_PollInfo_descriptor, + new java.lang.String[] { "Info", "FlightDescriptor", "Progress", "ExpirationTime", "Progress", }); + internal_static_arrow_flight_protocol_FlightEndpoint_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_arrow_flight_protocol_FlightEndpoint_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_FlightEndpoint_descriptor, + new java.lang.String[] { "Ticket", "Location", "ExpirationTime", "AppMetadata", }); + internal_static_arrow_flight_protocol_Location_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_arrow_flight_protocol_Location_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_Location_descriptor, + new java.lang.String[] { "Uri", }); + internal_static_arrow_flight_protocol_Ticket_descriptor = + getDescriptor().getMessageTypes().get(17); + 
internal_static_arrow_flight_protocol_Ticket_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_Ticket_descriptor, + new java.lang.String[] { "Ticket", }); + internal_static_arrow_flight_protocol_FlightData_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_arrow_flight_protocol_FlightData_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_FlightData_descriptor, + new java.lang.String[] { "FlightDescriptor", "DataHeader", "AppMetadata", "DataBody", }); + internal_static_arrow_flight_protocol_PutResult_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_arrow_flight_protocol_PutResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_PutResult_descriptor, + new java.lang.String[] { "AppMetadata", }); + internal_static_arrow_flight_protocol_SessionOptionValue_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_arrow_flight_protocol_SessionOptionValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_SessionOptionValue_descriptor, + new java.lang.String[] { "StringValue", "BoolValue", "Int64Value", "DoubleValue", "StringListValue", "OptionValue", }); + internal_static_arrow_flight_protocol_SessionOptionValue_StringListValue_descriptor = + internal_static_arrow_flight_protocol_SessionOptionValue_descriptor.getNestedTypes().get(0); + internal_static_arrow_flight_protocol_SessionOptionValue_StringListValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_SessionOptionValue_StringListValue_descriptor, + new java.lang.String[] { "Values", }); + internal_static_arrow_flight_protocol_SetSessionOptionsRequest_descriptor = + 
getDescriptor().getMessageTypes().get(21); + internal_static_arrow_flight_protocol_SetSessionOptionsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_SetSessionOptionsRequest_descriptor, + new java.lang.String[] { "SessionOptions", }); + internal_static_arrow_flight_protocol_SetSessionOptionsRequest_SessionOptionsEntry_descriptor = + internal_static_arrow_flight_protocol_SetSessionOptionsRequest_descriptor.getNestedTypes().get(0); + internal_static_arrow_flight_protocol_SetSessionOptionsRequest_SessionOptionsEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_SetSessionOptionsRequest_SessionOptionsEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_arrow_flight_protocol_SetSessionOptionsResult_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_arrow_flight_protocol_SetSessionOptionsResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_SetSessionOptionsResult_descriptor, + new java.lang.String[] { "Errors", }); + internal_static_arrow_flight_protocol_SetSessionOptionsResult_Error_descriptor = + internal_static_arrow_flight_protocol_SetSessionOptionsResult_descriptor.getNestedTypes().get(0); + internal_static_arrow_flight_protocol_SetSessionOptionsResult_Error_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_SetSessionOptionsResult_Error_descriptor, + new java.lang.String[] { "Value", }); + internal_static_arrow_flight_protocol_SetSessionOptionsResult_ErrorsEntry_descriptor = + internal_static_arrow_flight_protocol_SetSessionOptionsResult_descriptor.getNestedTypes().get(1); + internal_static_arrow_flight_protocol_SetSessionOptionsResult_ErrorsEntry_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_SetSessionOptionsResult_ErrorsEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_arrow_flight_protocol_GetSessionOptionsRequest_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_arrow_flight_protocol_GetSessionOptionsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_GetSessionOptionsRequest_descriptor, + new java.lang.String[] { }); + internal_static_arrow_flight_protocol_GetSessionOptionsResult_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_arrow_flight_protocol_GetSessionOptionsResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_GetSessionOptionsResult_descriptor, + new java.lang.String[] { "SessionOptions", }); + internal_static_arrow_flight_protocol_GetSessionOptionsResult_SessionOptionsEntry_descriptor = + internal_static_arrow_flight_protocol_GetSessionOptionsResult_descriptor.getNestedTypes().get(0); + internal_static_arrow_flight_protocol_GetSessionOptionsResult_SessionOptionsEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_GetSessionOptionsResult_SessionOptionsEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_arrow_flight_protocol_CloseSessionRequest_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_arrow_flight_protocol_CloseSessionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_CloseSessionRequest_descriptor, + new java.lang.String[] { }); + internal_static_arrow_flight_protocol_CloseSessionResult_descriptor = + getDescriptor().getMessageTypes().get(26); + 
internal_static_arrow_flight_protocol_CloseSessionResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_CloseSessionResult_descriptor, + new java.lang.String[] { "Status", }); + com.google.protobuf.TimestampProto.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/java/flight/flight-core/target/generated-sources/protobuf/java/org/apache/arrow/flight/sql/impl/FlightSql.java b/java/flight/flight-core/target/generated-sources/protobuf/java/org/apache/arrow/flight/sql/impl/FlightSql.java new file mode 100644 index 000000000000..8b0ab237faff --- /dev/null +++ b/java/flight/flight-core/target/generated-sources/protobuf/java/org/apache/arrow/flight/sql/impl/FlightSql.java @@ -0,0 +1,29377 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: FlightSql.proto + +package org.apache.arrow.flight.sql.impl; + +public final class FlightSql { + private FlightSql() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + registry.add(org.apache.arrow.flight.sql.impl.FlightSql.experimental); + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + /** + *
+   * Options for CommandGetSqlInfo.
+   * 
+ * + * Protobuf enum {@code arrow.flight.protocol.sql.SqlInfo} + */ + public enum SqlInfo + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+     * Retrieves a UTF-8 string with the name of the Flight SQL Server.
+     * 
+ * + * FLIGHT_SQL_SERVER_NAME = 0; + */ + FLIGHT_SQL_SERVER_NAME(0), + /** + *
+     * Retrieves a UTF-8 string with the native version of the Flight SQL Server.
+     * 
+ * + * FLIGHT_SQL_SERVER_VERSION = 1; + */ + FLIGHT_SQL_SERVER_VERSION(1), + /** + *
+     * Retrieves a UTF-8 string with the Arrow format version of the Flight SQL Server.
+     * 
+ * + * FLIGHT_SQL_SERVER_ARROW_VERSION = 2; + */ + FLIGHT_SQL_SERVER_ARROW_VERSION(2), + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server is read only.
+     *
+     * Returns:
+     * - false: if read-write
+     * - true: if read only
+     * 
+ * + * FLIGHT_SQL_SERVER_READ_ONLY = 3; + */ + FLIGHT_SQL_SERVER_READ_ONLY(3), + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server supports executing
+     * SQL queries.
+     *
+     * Note that the absence of this info (as opposed to a false value) does not necessarily
+     * mean that SQL is not supported, as this property was not originally defined.
+     * 
+ * + * FLIGHT_SQL_SERVER_SQL = 4; + */ + FLIGHT_SQL_SERVER_SQL(4), + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server supports executing
+     * Substrait plans.
+     * 
+ * + * FLIGHT_SQL_SERVER_SUBSTRAIT = 5; + */ + FLIGHT_SQL_SERVER_SUBSTRAIT(5), + /** + *
+     *
+     * Retrieves a string value indicating the minimum supported Substrait version, or null
+     * if Substrait is not supported.
+     * 
+ * + * FLIGHT_SQL_SERVER_SUBSTRAIT_MIN_VERSION = 6; + */ + FLIGHT_SQL_SERVER_SUBSTRAIT_MIN_VERSION(6), + /** + *
+     *
+     * Retrieves a string value indicating the maximum supported Substrait version, or null
+     * if Substrait is not supported.
+     * 
+ * + * FLIGHT_SQL_SERVER_SUBSTRAIT_MAX_VERSION = 7; + */ + FLIGHT_SQL_SERVER_SUBSTRAIT_MAX_VERSION(7), + /** + *
+     *
+     * Retrieves an int32 indicating whether the Flight SQL Server supports the
+     * BeginTransaction/EndTransaction/BeginSavepoint/EndSavepoint actions.
+     *
+     * Even if this is not supported, the database may still support explicit "BEGIN
+     * TRANSACTION"/"COMMIT" SQL statements (see SQL_TRANSACTIONS_SUPPORTED); this property
+     * is only about whether the server implements the Flight SQL API endpoints.
+     *
+     * The possible values are listed in `SqlSupportedTransaction`.
+     * 
+ * + * FLIGHT_SQL_SERVER_TRANSACTION = 8; + */ + FLIGHT_SQL_SERVER_TRANSACTION(8), + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server supports explicit
+     * query cancellation (the CancelQuery action).
+     * 
+ * + * FLIGHT_SQL_SERVER_CANCEL = 9; + */ + FLIGHT_SQL_SERVER_CANCEL(9), + /** + *
+     *
+     * Retrieves an int32 indicating the timeout (in milliseconds) for prepared statement handles.
+     *
+     * If 0, there is no timeout.  Servers should reset the timeout when the handle is used in a command.
+     * 
+ * + * FLIGHT_SQL_SERVER_STATEMENT_TIMEOUT = 100; + */ + FLIGHT_SQL_SERVER_STATEMENT_TIMEOUT(100), + /** + *
+     *
+     * Retrieves an int32 indicating the timeout (in milliseconds) for transactions, since transactions are not tied to a connection.
+     *
+     * If 0, there is no timeout.  Servers should reset the timeout when the handle is used in a command.
+     * 
+ * + * FLIGHT_SQL_SERVER_TRANSACTION_TIMEOUT = 101; + */ + FLIGHT_SQL_SERVER_TRANSACTION_TIMEOUT(101), + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server supports CREATE and DROP of catalogs.
+     *
+     * Returns:
+     * - false: if it doesn't support CREATE and DROP of catalogs.
+     * - true: if it supports CREATE and DROP of catalogs.
+     * 
+ * + * SQL_DDL_CATALOG = 500; + */ + SQL_DDL_CATALOG(500), + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server supports CREATE and DROP of schemas.
+     *
+     * Returns:
+     * - false: if it doesn't support CREATE and DROP of schemas.
+     * - true: if it supports CREATE and DROP of schemas.
+     * 
+ * + * SQL_DDL_SCHEMA = 501; + */ + SQL_DDL_SCHEMA(501), + /** + *
+     *
+     * Indicates whether the Flight SQL Server supports CREATE and DROP of tables.
+     *
+     * Returns:
+     * - false: if it doesn't support CREATE and DROP of tables.
+     * - true: if it supports CREATE and DROP of tables.
+     * 
+ * + * SQL_DDL_TABLE = 502; + */ + SQL_DDL_TABLE(502), + /** + *
+     *
+     * Retrieves a int32 ordinal representing the case sensitivity of catalog, table, schema and table names.
+     *
+     * The possible values are listed in `arrow.flight.protocol.sql.SqlSupportedCaseSensitivity`.
+     * 
+ * + * SQL_IDENTIFIER_CASE = 503; + */ + SQL_IDENTIFIER_CASE(503), + /** + *
+     * Retrieves a UTF-8 string with the supported character(s) used to surround a delimited identifier.
+     * 
+ * + * SQL_IDENTIFIER_QUOTE_CHAR = 504; + */ + SQL_IDENTIFIER_QUOTE_CHAR(504), + /** + *
+     *
+     * Retrieves a int32 describing the case sensitivity of quoted identifiers.
+     *
+     * The possible values are listed in `arrow.flight.protocol.sql.SqlSupportedCaseSensitivity`.
+     * 
+ * + * SQL_QUOTED_IDENTIFIER_CASE = 505; + */ + SQL_QUOTED_IDENTIFIER_CASE(505), + /** + *
+     *
+     * Retrieves a boolean value indicating whether all tables are selectable.
+     *
+     * Returns:
+     * - false: if not all tables are selectable or if none are;
+     * - true: if all tables are selectable.
+     * 
+ * + * SQL_ALL_TABLES_ARE_SELECTABLE = 506; + */ + SQL_ALL_TABLES_ARE_SELECTABLE(506), + /** + *
+     *
+     * Retrieves the null ordering.
+     *
+     * Returns a int32 ordinal for the null ordering being used, as described in
+     * `arrow.flight.protocol.sql.SqlNullOrdering`.
+     * 
+ * + * SQL_NULL_ORDERING = 507; + */ + SQL_NULL_ORDERING(507), + /** + *
+     * Retrieves a UTF-8 string list with values of the supported keywords.
+     * 
+ * + * SQL_KEYWORDS = 508; + */ + SQL_KEYWORDS(508), + /** + *
+     * Retrieves a UTF-8 string list with values of the supported numeric functions.
+     * 
+ * + * SQL_NUMERIC_FUNCTIONS = 509; + */ + SQL_NUMERIC_FUNCTIONS(509), + /** + *
+     * Retrieves a UTF-8 string list with values of the supported string functions.
+     * 
+ * + * SQL_STRING_FUNCTIONS = 510; + */ + SQL_STRING_FUNCTIONS(510), + /** + *
+     * Retrieves a UTF-8 string list with values of the supported system functions.
+     * 
+ * + * SQL_SYSTEM_FUNCTIONS = 511; + */ + SQL_SYSTEM_FUNCTIONS(511), + /** + *
+     * Retrieves a UTF-8 string list with values of the supported datetime functions.
+     * 
+ * + * SQL_DATETIME_FUNCTIONS = 512; + */ + SQL_DATETIME_FUNCTIONS(512), + /** + *
+     *
+     * Retrieves the UTF-8 string that can be used to escape wildcard characters.
+     * This is the string that can be used to escape '_' or '%' in the catalog search parameters that are a pattern
+     * (and therefore use one of the wildcard characters).
+     * The '_' character represents any single character; the '%' character represents any sequence of zero or more
+     * characters.
+     * 
+ * + * SQL_SEARCH_STRING_ESCAPE = 513; + */ + SQL_SEARCH_STRING_ESCAPE(513), + /** + *
+     *
+     * Retrieves a UTF-8 string with all the "extra" characters that can be used in unquoted identifier names
+     * (those beyond a-z, A-Z, 0-9 and _).
+     * 
+ * + * SQL_EXTRA_NAME_CHARACTERS = 514; + */ + SQL_EXTRA_NAME_CHARACTERS(514), + /** + *
+     *
+     * Retrieves a boolean value indicating whether column aliasing is supported.
+     * If so, the SQL AS clause can be used to provide names for computed columns or to provide alias names for columns
+     * as required.
+     *
+     * Returns:
+     * - false: if column aliasing is unsupported;
+     * - true: if column aliasing is supported.
+     * 
+ * + * SQL_SUPPORTS_COLUMN_ALIASING = 515; + */ + SQL_SUPPORTS_COLUMN_ALIASING(515), + /** + *
+     *
+     * Retrieves a boolean value indicating whether concatenations between null and non-null values being
+     * null are supported.
+     *
+     * - Returns:
+     * - false: if concatenations between null and non-null values being null are unsupported;
+     * - true: if concatenations between null and non-null values being null are supported.
+     * 
+ * + * SQL_NULL_PLUS_NULL_IS_NULL = 516; + */ + SQL_NULL_PLUS_NULL_IS_NULL(516), + /** + *
+     *
+     * Retrieves a map where the key is the type to convert from and the value is a list with the types to convert to,
+     * indicating the supported conversions. Each key and each item on the list value is a value to a predefined type on
+     * SqlSupportsConvert enum.
+     * The returned map will be:  map<int32, list<int32>>
+     * 
+ * + * SQL_SUPPORTS_CONVERT = 517; + */ + SQL_SUPPORTS_CONVERT(517), + /** + *
+     *
+     * Retrieves a boolean value indicating whether table correlation names are supported.
+     *
+     * Returns:
+     * - false: if table correlation names are unsupported;
+     * - true: if table correlation names are supported.
+     * 
+ * + * SQL_SUPPORTS_TABLE_CORRELATION_NAMES = 518; + */ + SQL_SUPPORTS_TABLE_CORRELATION_NAMES(518), + /** + *
+     *
+     * Retrieves a boolean value indicating whether, when table correlation names are supported,
+     * they are restricted to being different from the names of the tables.
+     *
+     * Returns:
+     * - false: if different table correlation names are unsupported;
+     * - true: if different table correlation names are supported
+     * 
+ * + * SQL_SUPPORTS_DIFFERENT_TABLE_CORRELATION_NAMES = 519; + */ + SQL_SUPPORTS_DIFFERENT_TABLE_CORRELATION_NAMES(519), + /** + *
+     *
+     * Retrieves a boolean value indicating whether expressions in ORDER BY lists are supported.
+     *
+     * Returns:
+     * - false: if expressions in ORDER BY are unsupported;
+     * - true: if expressions in ORDER BY are supported;
+     * 
+ * + * SQL_SUPPORTS_EXPRESSIONS_IN_ORDER_BY = 520; + */ + SQL_SUPPORTS_EXPRESSIONS_IN_ORDER_BY(520), + /** + *
+     *
+     * Retrieves a boolean value indicating whether using a column that is not in the SELECT statement in an ORDER BY
+     * clause is supported.
+     *
+     * Returns:
+     * - false: if using a column that is not in the SELECT statement in an ORDER BY clause is unsupported;
+     * - true: if using a column that is not in the SELECT statement in an ORDER BY clause is supported.
+     * 
+ * + * SQL_SUPPORTS_ORDER_BY_UNRELATED = 521; + */ + SQL_SUPPORTS_ORDER_BY_UNRELATED(521), + /** + *
+     *
+     * Retrieves the supported GROUP BY commands;
+     *
+     * Returns an int32 bitmask value representing the supported commands.
+     * The returned bitmask should be parsed in order to retrieve the supported commands.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (GROUP BY is unsupported);
+     * - return 1 (\b1)   => [SQL_GROUP_BY_UNRELATED];
+     * - return 2 (\b10)  => [SQL_GROUP_BY_BEYOND_SELECT];
+     * - return 3 (\b11)  => [SQL_GROUP_BY_UNRELATED, SQL_GROUP_BY_BEYOND_SELECT].
+     * Valid GROUP BY types are described under `arrow.flight.protocol.sql.SqlSupportedGroupBy`.
+     * 
+ * + * SQL_SUPPORTED_GROUP_BY = 522; + */ + SQL_SUPPORTED_GROUP_BY(522), + /** + *
+     *
+     * Retrieves a boolean value indicating whether specifying a LIKE escape clause is supported.
+     *
+     * Returns:
+     * - false: if specifying a LIKE escape clause is unsupported;
+     * - true: if specifying a LIKE escape clause is supported.
+     * 
+ * + * SQL_SUPPORTS_LIKE_ESCAPE_CLAUSE = 523; + */ + SQL_SUPPORTS_LIKE_ESCAPE_CLAUSE(523), + /** + *
+     *
+     * Retrieves a boolean value indicating whether columns may be defined as non-nullable.
+     *
+     * Returns:
+     * - false: if columns cannot be defined as non-nullable;
+     * - true: if columns may be defined as non-nullable.
+     * 
+ * + * SQL_SUPPORTS_NON_NULLABLE_COLUMNS = 524; + */ + SQL_SUPPORTS_NON_NULLABLE_COLUMNS(524), + /** + *
+     *
+     * Retrieves the supported SQL grammar level as per the ODBC specification.
+     *
+     * Returns an int32 bitmask value representing the supported SQL grammar level.
+     * The returned bitmask should be parsed in order to retrieve the supported grammar levels.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (SQL grammar is unsupported);
+     * - return 1 (\b1)   => [SQL_MINIMUM_GRAMMAR];
+     * - return 2 (\b10)  => [SQL_CORE_GRAMMAR];
+     * - return 3 (\b11)  => [SQL_MINIMUM_GRAMMAR, SQL_CORE_GRAMMAR];
+     * - return 4 (\b100) => [SQL_EXTENDED_GRAMMAR];
+     * - return 5 (\b101) => [SQL_MINIMUM_GRAMMAR, SQL_EXTENDED_GRAMMAR];
+     * - return 6 (\b110) => [SQL_CORE_GRAMMAR, SQL_EXTENDED_GRAMMAR];
+     * - return 7 (\b111) => [SQL_MINIMUM_GRAMMAR, SQL_CORE_GRAMMAR, SQL_EXTENDED_GRAMMAR].
+     * Valid SQL grammar levels are described under `arrow.flight.protocol.sql.SupportedSqlGrammar`.
+     * 
+ * + * SQL_SUPPORTED_GRAMMAR = 525; + */ + SQL_SUPPORTED_GRAMMAR(525), + /** + *
+     *
+     * Retrieves the supported ANSI92 SQL grammar level.
+     *
+     * Returns an int32 bitmask value representing the supported ANSI92 SQL grammar level.
+     * The returned bitmask should be parsed in order to retrieve the supported commands.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (ANSI92 SQL grammar is unsupported);
+     * - return 1 (\b1)   => [ANSI92_ENTRY_SQL];
+     * - return 2 (\b10)  => [ANSI92_INTERMEDIATE_SQL];
+     * - return 3 (\b11)  => [ANSI92_ENTRY_SQL, ANSI92_INTERMEDIATE_SQL];
+     * - return 4 (\b100) => [ANSI92_FULL_SQL];
+     * - return 5 (\b101) => [ANSI92_ENTRY_SQL, ANSI92_FULL_SQL];
+     * - return 6 (\b110) => [ANSI92_INTERMEDIATE_SQL, ANSI92_FULL_SQL];
+     * - return 7 (\b111) => [ANSI92_ENTRY_SQL, ANSI92_INTERMEDIATE_SQL, ANSI92_FULL_SQL].
+     * Valid ANSI92 SQL grammar levels are described under `arrow.flight.protocol.sql.SupportedAnsi92SqlGrammarLevel`.
+     * 
+ * + * SQL_ANSI92_SUPPORTED_LEVEL = 526; + */ + SQL_ANSI92_SUPPORTED_LEVEL(526), + /** + *
+     *
+     * Retrieves a boolean value indicating whether the SQL Integrity Enhancement Facility is supported.
+     *
+     * Returns:
+     * - false: if the SQL Integrity Enhancement Facility is unsupported;
+     * - true: if the SQL Integrity Enhancement Facility is supported.
+     * 
+ * + * SQL_SUPPORTS_INTEGRITY_ENHANCEMENT_FACILITY = 527; + */ + SQL_SUPPORTS_INTEGRITY_ENHANCEMENT_FACILITY(527), + /** + *
+     *
+     * Retrieves the support level for SQL OUTER JOINs.
+     *
+     * Returns a int32 ordinal for the SQL ordering being used, as described in
+     * `arrow.flight.protocol.sql.SqlOuterJoinsSupportLevel`.
+     * 
+ * + * SQL_OUTER_JOINS_SUPPORT_LEVEL = 528; + */ + SQL_OUTER_JOINS_SUPPORT_LEVEL(528), + /** + *
+     * Retrieves a UTF-8 string with the preferred term for "schema".
+     * 
+ * + * SQL_SCHEMA_TERM = 529; + */ + SQL_SCHEMA_TERM(529), + /** + *
+     * Retrieves a UTF-8 string with the preferred term for "procedure".
+     * 
+ * + * SQL_PROCEDURE_TERM = 530; + */ + SQL_PROCEDURE_TERM(530), + /** + *
+     *
+     * Retrieves a UTF-8 string with the preferred term for "catalog".
+     * If an empty string is returned, it is assumed that the server does NOT support catalogs.
+     * 
+ * + * SQL_CATALOG_TERM = 531; + */ + SQL_CATALOG_TERM(531), + /** + *
+     *
+     * Retrieves a boolean value indicating whether a catalog appears at the start of a fully qualified table name.
+     *
+     * - false: if a catalog does not appear at the start of a fully qualified table name;
+     * - true: if a catalog appears at the start of a fully qualified table name.
+     * 
+ * + * SQL_CATALOG_AT_START = 532; + */ + SQL_CATALOG_AT_START(532), + /** + *
+     *
+     * Retrieves the supported actions for a SQL schema.
+     *
+     * Returns an int32 bitmask value representing the supported actions for a SQL schema.
+     * The returned bitmask should be parsed in order to retrieve the supported actions for a SQL schema.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported actions for SQL schema);
+     * - return 1 (\b1)   => [SQL_ELEMENT_IN_PROCEDURE_CALLS];
+     * - return 2 (\b10)  => [SQL_ELEMENT_IN_INDEX_DEFINITIONS];
+     * - return 3 (\b11)  => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS];
+     * - return 4 (\b100) => [SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 5 (\b101) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 6 (\b110) => [SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 7 (\b111) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS].
+     * Valid actions for a SQL schema described under `arrow.flight.protocol.sql.SqlSupportedElementActions`.
+     * 
+ * + * SQL_SCHEMAS_SUPPORTED_ACTIONS = 533; + */ + SQL_SCHEMAS_SUPPORTED_ACTIONS(533), + /** + *
+     *
+     * Retrieves the supported actions for a SQL catalog.
+     *
+     * Returns an int32 bitmask value representing the supported actions for a SQL catalog.
+     * The returned bitmask should be parsed in order to retrieve the supported actions for a SQL catalog.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported actions for SQL catalog);
+     * - return 1 (\b1)   => [SQL_ELEMENT_IN_PROCEDURE_CALLS];
+     * - return 2 (\b10)  => [SQL_ELEMENT_IN_INDEX_DEFINITIONS];
+     * - return 3 (\b11)  => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS];
+     * - return 4 (\b100) => [SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 5 (\b101) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 6 (\b110) => [SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 7 (\b111) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS].
+     * Valid actions for a SQL catalog are described under `arrow.flight.protocol.sql.SqlSupportedElementActions`.
+     * 
+ * + * SQL_CATALOGS_SUPPORTED_ACTIONS = 534; + */ + SQL_CATALOGS_SUPPORTED_ACTIONS(534), + /** + *
+     *
+     * Retrieves the supported SQL positioned commands.
+     *
+     * Returns an int32 bitmask value representing the supported SQL positioned commands.
+     * The returned bitmask should be parsed in order to retrieve the supported SQL positioned commands.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported SQL positioned commands);
+     * - return 1 (\b1)   => [SQL_POSITIONED_DELETE];
+     * - return 2 (\b10)  => [SQL_POSITIONED_UPDATE];
+     * - return 3 (\b11)  => [SQL_POSITIONED_DELETE, SQL_POSITIONED_UPDATE].
+     * Valid SQL positioned commands are described under `arrow.flight.protocol.sql.SqlSupportedPositionedCommands`.
+     * 
+ * + * SQL_SUPPORTED_POSITIONED_COMMANDS = 535; + */ + SQL_SUPPORTED_POSITIONED_COMMANDS(535), + /** + *
+     *
+     * Retrieves a boolean value indicating whether SELECT FOR UPDATE statements are supported.
+     *
+     * Returns:
+     * - false: if SELECT FOR UPDATE statements are unsupported;
+     * - true: if SELECT FOR UPDATE statements are supported.
+     * 
+ * + * SQL_SELECT_FOR_UPDATE_SUPPORTED = 536; + */ + SQL_SELECT_FOR_UPDATE_SUPPORTED(536), + /** + *
+     *
+     * Retrieves a boolean value indicating whether stored procedure calls that use the stored procedure escape syntax
+     * are supported.
+     *
+     * Returns:
+     * - false: if stored procedure calls that use the stored procedure escape syntax are unsupported;
+     * - true: if stored procedure calls that use the stored procedure escape syntax are supported.
+     * 
+ * + * SQL_STORED_PROCEDURES_SUPPORTED = 537; + */ + SQL_STORED_PROCEDURES_SUPPORTED(537), + /** + *
+     *
+     * Retrieves the supported SQL subqueries.
+     *
+     * Returns an int32 bitmask value representing the supported SQL subqueries.
+     * The returned bitmask should be parsed in order to retrieve the supported SQL subqueries.
+     *
+     * For instance:
+     * - return 0   (\b0)     => [] (no supported SQL subqueries);
+     * - return 1   (\b1)     => [SQL_SUBQUERIES_IN_COMPARISONS];
+     * - return 2   (\b10)    => [SQL_SUBQUERIES_IN_EXISTS];
+     * - return 3   (\b11)    => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS];
+     * - return 4   (\b100)   => [SQL_SUBQUERIES_IN_INS];
+     * - return 5   (\b101)   => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_INS];
+     * - return 6   (\b110)   => [SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_EXISTS];
+     * - return 7   (\b111)   => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS];
+     * - return 8   (\b1000)  => [SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 9   (\b1001)  => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 10  (\b1010)  => [SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 11  (\b1011)  => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 12  (\b1100)  => [SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 13  (\b1101)  => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 14  (\b1110)  => [SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 15  (\b1111)  => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - ...
+     * Valid SQL subqueries are described under `arrow.flight.protocol.sql.SqlSupportedSubqueries`.
+     * 
+ * + * SQL_SUPPORTED_SUBQUERIES = 538; + */ + SQL_SUPPORTED_SUBQUERIES(538), + /** + *
+     *
+     * Retrieves a boolean value indicating whether correlated subqueries are supported.
+     *
+     * Returns:
+     * - false: if correlated subqueries are unsupported;
+     * - true: if correlated subqueries are supported.
+     * 
+ * + * SQL_CORRELATED_SUBQUERIES_SUPPORTED = 539; + */ + SQL_CORRELATED_SUBQUERIES_SUPPORTED(539), + /** + *
+     *
+     * Retrieves the supported SQL UNIONs.
+     *
+     * Returns an int32 bitmask value representing the supported SQL UNIONs.
+     * The returned bitmask should be parsed in order to retrieve the supported SQL UNIONs.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported SQL UNIONs);
+     * - return 1 (\b1)   => [SQL_UNION];
+     * - return 2 (\b10)  => [SQL_UNION_ALL];
+     * - return 3 (\b11)  => [SQL_UNION, SQL_UNION_ALL].
+     * Valid SQL UNIONs are described under `arrow.flight.protocol.sql.SqlSupportedUnions`.
+     * 
+ * + * SQL_SUPPORTED_UNIONS = 540; + */ + SQL_SUPPORTED_UNIONS(540), + /** + *
+     * Retrieves a int64 value representing the maximum number of hex characters allowed in an inline binary literal.
+     * 
+ * + * SQL_MAX_BINARY_LITERAL_LENGTH = 541; + */ + SQL_MAX_BINARY_LITERAL_LENGTH(541), + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed for a character literal.
+     * 
+ * + * SQL_MAX_CHAR_LITERAL_LENGTH = 542; + */ + SQL_MAX_CHAR_LITERAL_LENGTH(542), + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed for a column name.
+     * 
+ * + * SQL_MAX_COLUMN_NAME_LENGTH = 543; + */ + SQL_MAX_COLUMN_NAME_LENGTH(543), + /** + *
+     * Retrieves a int64 value representing the maximum number of columns allowed in a GROUP BY clause.
+     * 
+ * + * SQL_MAX_COLUMNS_IN_GROUP_BY = 544; + */ + SQL_MAX_COLUMNS_IN_GROUP_BY(544), + /** + *
+     * Retrieves a int64 value representing the maximum number of columns allowed in an index.
+     * 
+ * + * SQL_MAX_COLUMNS_IN_INDEX = 545; + */ + SQL_MAX_COLUMNS_IN_INDEX(545), + /** + *
+     * Retrieves a int64 value representing the maximum number of columns allowed in an ORDER BY clause.
+     * 
+ * + * SQL_MAX_COLUMNS_IN_ORDER_BY = 546; + */ + SQL_MAX_COLUMNS_IN_ORDER_BY(546), + /** + *
+     * Retrieves a int64 value representing the maximum number of columns allowed in a SELECT list.
+     * 
+ * + * SQL_MAX_COLUMNS_IN_SELECT = 547; + */ + SQL_MAX_COLUMNS_IN_SELECT(547), + /** + *
+     * Retrieves a int64 value representing the maximum number of columns allowed in a table.
+     * 
+ * + * SQL_MAX_COLUMNS_IN_TABLE = 548; + */ + SQL_MAX_COLUMNS_IN_TABLE(548), + /** + *
+     * Retrieves a int64 value representing the maximum number of concurrent connections possible.
+     * 
+ * + * SQL_MAX_CONNECTIONS = 549; + */ + SQL_MAX_CONNECTIONS(549), + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a cursor name.
+     * 
+ * + * SQL_MAX_CURSOR_NAME_LENGTH = 550; + */ + SQL_MAX_CURSOR_NAME_LENGTH(550), + /** + *
+     *
+     * Retrieves a int64 value representing the maximum number of bytes allowed for an index,
+     * including all of the parts of the index.
+     * 
+ * + * SQL_MAX_INDEX_LENGTH = 551; + */ + SQL_MAX_INDEX_LENGTH(551), + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a schema name.
+     * 
+ * + * SQL_DB_SCHEMA_NAME_LENGTH = 552; + */ + SQL_DB_SCHEMA_NAME_LENGTH(552), + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a procedure name.
+     * 
+ * + * SQL_MAX_PROCEDURE_NAME_LENGTH = 553; + */ + SQL_MAX_PROCEDURE_NAME_LENGTH(553), + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a catalog name.
+     * 
+ * + * SQL_MAX_CATALOG_NAME_LENGTH = 554; + */ + SQL_MAX_CATALOG_NAME_LENGTH(554), + /** + *
+     * Retrieves a int64 value representing the maximum number of bytes allowed in a single row.
+     * 
+ * + * SQL_MAX_ROW_SIZE = 555; + */ + SQL_MAX_ROW_SIZE(555), + /** + *
+     *
+     * Retrieves a boolean indicating whether the return value for the JDBC method getMaxRowSize includes the SQL
+     * data types LONGVARCHAR and LONGVARBINARY.
+     *
+     * Returns:
+     * - false: if return value for the JDBC method getMaxRowSize does
+     *          not include the SQL data types LONGVARCHAR and LONGVARBINARY;
+     * - true: if return value for the JDBC method getMaxRowSize includes
+     *         the SQL data types LONGVARCHAR and LONGVARBINARY.
+     * 
+ * + * SQL_MAX_ROW_SIZE_INCLUDES_BLOBS = 556; + */ + SQL_MAX_ROW_SIZE_INCLUDES_BLOBS(556), + /** + *
+     *
+     * Retrieves a int64 value representing the maximum number of characters allowed for an SQL statement;
+     * a result of 0 (zero) means that there is no limit or the limit is not known.
+     * 
+ * + * SQL_MAX_STATEMENT_LENGTH = 557; + */ + SQL_MAX_STATEMENT_LENGTH(557), + /** + *
+     * Retrieves a int64 value representing the maximum number of active statements that can be open at the same time.
+     * 
+ * + * SQL_MAX_STATEMENTS = 558; + */ + SQL_MAX_STATEMENTS(558), + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a table name.
+     * 
+ * + * SQL_MAX_TABLE_NAME_LENGTH = 559; + */ + SQL_MAX_TABLE_NAME_LENGTH(559), + /** + *
+     * Retrieves a int64 value representing the maximum number of tables allowed in a SELECT statement.
+     * 
+ * + * SQL_MAX_TABLES_IN_SELECT = 560; + */ + SQL_MAX_TABLES_IN_SELECT(560), + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a user name.
+     * 
+ * + * SQL_MAX_USERNAME_LENGTH = 561; + */ + SQL_MAX_USERNAME_LENGTH(561), + /** + *
+     *
+     * Retrieves this database's default transaction isolation level as described in
+     * `arrow.flight.protocol.sql.SqlTransactionIsolationLevel`.
+     *
+     * Returns a int32 ordinal for the SQL transaction isolation level.
+     * 
+ * + * SQL_DEFAULT_TRANSACTION_ISOLATION = 562; + */ + SQL_DEFAULT_TRANSACTION_ISOLATION(562), + /** + *
+     *
+     * Retrieves a boolean value indicating whether transactions are supported. If not, invoking the method commit is a
+     * noop, and the isolation level is `arrow.flight.protocol.sql.SqlTransactionIsolationLevel.TRANSACTION_NONE`.
+     *
+     * Returns:
+     * - false: if transactions are unsupported;
+     * - true: if transactions are supported.
+     * 
+ * + * SQL_TRANSACTIONS_SUPPORTED = 563; + */ + SQL_TRANSACTIONS_SUPPORTED(563), + /** + *
+     *
+     * Retrieves the supported transactions isolation levels.
+     *
+     * Returns an int32 bitmask value representing the supported transactions isolation levels.
+     * The returned bitmask should be parsed in order to retrieve the supported transactions isolation levels.
+     *
+     * For instance:
+     * - return 0   (\b0)     => [] (no supported SQL transactions isolation levels);
+     * - return 1   (\b1)     => [SQL_TRANSACTION_NONE];
+     * - return 2   (\b10)    => [SQL_TRANSACTION_READ_UNCOMMITTED];
+     * - return 3   (\b11)    => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED];
+     * - return 4   (\b100)   => [SQL_TRANSACTION_READ_COMMITTED];
+     * - return 5   (\b101)   => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_COMMITTED];
+     * - return 6   (\b110)   => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_READ_COMMITTED];
+     * - return 7   (\b111)   => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_READ_COMMITTED];
+     * - return 8   (\b1000)  => [SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 9   (\b1001)  => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 10  (\b1010)  => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 11  (\b1011)  => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 12  (\b1100)  => [SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 13  (\b1101)  => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 14  (\b1110)  => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 15  (\b1111)  => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 16  (\b10000) => [SQL_TRANSACTION_SERIALIZABLE];
+     * - ...
+     * Valid SQL transaction isolation levels are described under `arrow.flight.protocol.sql.SqlTransactionIsolationLevel`.
+     * 
+ * + * SQL_SUPPORTED_TRANSACTIONS_ISOLATION_LEVELS = 564; + */ + SQL_SUPPORTED_TRANSACTIONS_ISOLATION_LEVELS(564), + /** + *
+     *
+     * Retrieves a boolean value indicating whether a data definition statement within a transaction forces
+     * the transaction to commit.
+     *
+     * Returns:
+     * - false: if a data definition statement within a transaction does not force the transaction to commit;
+     * - true: if a data definition statement within a transaction forces the transaction to commit.
+     * 
+ * + * SQL_DATA_DEFINITION_CAUSES_TRANSACTION_COMMIT = 565; + */ + SQL_DATA_DEFINITION_CAUSES_TRANSACTION_COMMIT(565), + /** + *
+     *
+     * Retrieves a boolean value indicating whether a data definition statement within a transaction is ignored.
+     *
+     * Returns:
+     * - false: if a data definition statement within a transaction is taken into account;
+     * - true: a data definition statement within a transaction is ignored.
+     * 
+ * + * SQL_DATA_DEFINITIONS_IN_TRANSACTIONS_IGNORED = 566; + */ + SQL_DATA_DEFINITIONS_IN_TRANSACTIONS_IGNORED(566), + /** + *
+     *
+     * Retrieves an int32 bitmask value representing the supported result set types.
+     * The returned bitmask should be parsed in order to retrieve the supported result set types.
+     *
+     * For instance:
+     * - return 0   (\b0)     => [] (no supported result set types);
+     * - return 1   (\b1)     => [SQL_RESULT_SET_TYPE_UNSPECIFIED];
+     * - return 2   (\b10)    => [SQL_RESULT_SET_TYPE_FORWARD_ONLY];
+     * - return 3   (\b11)    => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_FORWARD_ONLY];
+     * - return 4   (\b100)   => [SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
+     * - return 5   (\b101)   => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
+     * - return 6   (\b110)   => [SQL_RESULT_SET_TYPE_FORWARD_ONLY, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
+     * - return 7   (\b111)   => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_FORWARD_ONLY, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
+     * - return 8   (\b1000)  => [SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE];
+     * - ...
+     * Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetType`.
+     * 
+ * + * SQL_SUPPORTED_RESULT_SET_TYPES = 567; + */ + SQL_SUPPORTED_RESULT_SET_TYPES(567), + /** + *
+     *
+     * Returns an int32 bitmask value concurrency types supported for
+     * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_UNSPECIFIED`.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
+     * - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
+     * - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 6 (\b110)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 7 (\b111)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
+     * 
+ * + * SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_UNSPECIFIED = 568; + */ + SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_UNSPECIFIED(568), + /** + *
+     *
+     * Returns an int32 bitmask value concurrency types supported for
+     * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_FORWARD_ONLY`.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
+     * - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
+     * - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 6 (\b110)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 7 (\b111)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
+     * 
+ * + * SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_FORWARD_ONLY = 569; + */ + SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_FORWARD_ONLY(569), + /** + *
+     *
+     * Returns an int32 bitmask value concurrency types supported for
+     * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE`.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
+     * - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
+     * - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 6 (\b110)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 7 (\b111)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
+     * 
+ * + * SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_SENSITIVE = 570; + */ + SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_SENSITIVE(570), + /** + *
+     *
+     * Returns an int32 bitmask value concurrency types supported for
+     * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE`.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
+     * - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
+     * - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 6 (\b110)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 7 (\b111)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
+     * 
+ * + * SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_INSENSITIVE = 571; + */ + SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_INSENSITIVE(571), + /** + *
+     *
+     * Retrieves a boolean value indicating whether this database supports batch updates.
+     *
+     * - false: if this database does not support batch updates;
+     * - true: if this database supports batch updates.
+     * 
+ * + * SQL_BATCH_UPDATES_SUPPORTED = 572; + */ + SQL_BATCH_UPDATES_SUPPORTED(572), + /** + *
+     *
+     * Retrieves a boolean value indicating whether this database supports savepoints.
+     *
+     * Returns:
+     * - false: if this database does not support savepoints;
+     * - true: if this database supports savepoints.
+     * 
+ * + * SQL_SAVEPOINTS_SUPPORTED = 573; + */ + SQL_SAVEPOINTS_SUPPORTED(573), + /** + *
+     *
+     * Retrieves a boolean value indicating whether named parameters are supported in callable statements.
+     *
+     * Returns:
+     * - false: if named parameters in callable statements are unsupported;
+     * - true: if named parameters in callable statements are supported.
+     * 
+ * + * SQL_NAMED_PARAMETERS_SUPPORTED = 574; + */ + SQL_NAMED_PARAMETERS_SUPPORTED(574), + /** + *
+     *
+     * Retrieves a boolean value indicating whether updates made to a LOB are made on a copy or directly to the LOB.
+     *
+     * Returns:
+     * - false: if updates made to a LOB are made directly to the LOB;
+     * - true: if updates made to a LOB are made on a copy.
+     * 
+ * + * SQL_LOCATORS_UPDATE_COPY = 575; + */ + SQL_LOCATORS_UPDATE_COPY(575), + /** + *
+     *
+     * Retrieves a boolean value indicating whether invoking user-defined or vendor functions
+     * using the stored procedure escape syntax is supported.
+     *
+     * Returns:
+     * - false: if invoking user-defined or vendor functions using the stored procedure escape syntax is unsupported;
+     * - true: if invoking user-defined or vendor functions using the stored procedure escape syntax is supported.
+     * 
+ * + * SQL_STORED_FUNCTIONS_USING_CALL_SYNTAX_SUPPORTED = 576; + */ + SQL_STORED_FUNCTIONS_USING_CALL_SYNTAX_SUPPORTED(576), + UNRECOGNIZED(-1), + ; + + /** + *
+     * Retrieves a UTF-8 string with the name of the Flight SQL Server.
+     * 
+ * + * FLIGHT_SQL_SERVER_NAME = 0; + */ + public static final int FLIGHT_SQL_SERVER_NAME_VALUE = 0; + /** + *
+     * Retrieves a UTF-8 string with the native version of the Flight SQL Server.
+     * 
+ * + * FLIGHT_SQL_SERVER_VERSION = 1; + */ + public static final int FLIGHT_SQL_SERVER_VERSION_VALUE = 1; + /** + *
+     * Retrieves a UTF-8 string with the Arrow format version of the Flight SQL Server.
+     * 
+ * + * FLIGHT_SQL_SERVER_ARROW_VERSION = 2; + */ + public static final int FLIGHT_SQL_SERVER_ARROW_VERSION_VALUE = 2; + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server is read only.
+     *
+     * Returns:
+     * - false: if read-write
+     * - true: if read only
+     * 
+ * + * FLIGHT_SQL_SERVER_READ_ONLY = 3; + */ + public static final int FLIGHT_SQL_SERVER_READ_ONLY_VALUE = 3; + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server supports executing
+     * SQL queries.
+     *
+     * Note that the absence of this info (as opposed to a false value) does not necessarily
+     * mean that SQL is not supported, as this property was not originally defined.
+     * 
+ * + * FLIGHT_SQL_SERVER_SQL = 4; + */ + public static final int FLIGHT_SQL_SERVER_SQL_VALUE = 4; + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server supports executing
+     * Substrait plans.
+     * 
+ * + * FLIGHT_SQL_SERVER_SUBSTRAIT = 5; + */ + public static final int FLIGHT_SQL_SERVER_SUBSTRAIT_VALUE = 5; + /** + *
+     *
+     * Retrieves a string value indicating the minimum supported Substrait version, or null
+     * if Substrait is not supported.
+     * 
+ * + * FLIGHT_SQL_SERVER_SUBSTRAIT_MIN_VERSION = 6; + */ + public static final int FLIGHT_SQL_SERVER_SUBSTRAIT_MIN_VERSION_VALUE = 6; + /** + *
+     *
+     * Retrieves a string value indicating the maximum supported Substrait version, or null
+     * if Substrait is not supported.
+     * 
+ * + * FLIGHT_SQL_SERVER_SUBSTRAIT_MAX_VERSION = 7; + */ + public static final int FLIGHT_SQL_SERVER_SUBSTRAIT_MAX_VERSION_VALUE = 7; + /** + *
+     *
+     * Retrieves an int32 indicating whether the Flight SQL Server supports the
+     * BeginTransaction/EndTransaction/BeginSavepoint/EndSavepoint actions.
+     *
+     * Even if this is not supported, the database may still support explicit "BEGIN
+     * TRANSACTION"/"COMMIT" SQL statements (see SQL_TRANSACTIONS_SUPPORTED); this property
+     * is only about whether the server implements the Flight SQL API endpoints.
+     *
+     * The possible values are listed in `SqlSupportedTransaction`.
+     * 
+ * + * FLIGHT_SQL_SERVER_TRANSACTION = 8; + */ + public static final int FLIGHT_SQL_SERVER_TRANSACTION_VALUE = 8; + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server supports explicit
+     * query cancellation (the CancelQuery action).
+     * 
+ * + * FLIGHT_SQL_SERVER_CANCEL = 9; + */ + public static final int FLIGHT_SQL_SERVER_CANCEL_VALUE = 9; + /** + *
+     *
+     * Retrieves an int32 indicating the timeout (in milliseconds) for prepared statement handles.
+     *
+     * If 0, there is no timeout.  Servers should reset the timeout when the handle is used in a command.
+     * 
+ * + * FLIGHT_SQL_SERVER_STATEMENT_TIMEOUT = 100; + */ + public static final int FLIGHT_SQL_SERVER_STATEMENT_TIMEOUT_VALUE = 100; + /** + *
+     *
+     * Retrieves an int32 indicating the timeout (in milliseconds) for transactions, since transactions are not tied to a connection.
+     *
+     * If 0, there is no timeout.  Servers should reset the timeout when the handle is used in a command.
+     * 
+ * + * FLIGHT_SQL_SERVER_TRANSACTION_TIMEOUT = 101; + */ + public static final int FLIGHT_SQL_SERVER_TRANSACTION_TIMEOUT_VALUE = 101; + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server supports CREATE and DROP of catalogs.
+     *
+     * Returns:
+     * - false: if it doesn't support CREATE and DROP of catalogs.
+     * - true: if it supports CREATE and DROP of catalogs.
+     * 
+ * + * SQL_DDL_CATALOG = 500; + */ + public static final int SQL_DDL_CATALOG_VALUE = 500; + /** + *
+     *
+     * Retrieves a boolean value indicating whether the Flight SQL Server supports CREATE and DROP of schemas.
+     *
+     * Returns:
+     * - false: if it doesn't support CREATE and DROP of schemas.
+     * - true: if it supports CREATE and DROP of schemas.
+     * 
+ * + * SQL_DDL_SCHEMA = 501; + */ + public static final int SQL_DDL_SCHEMA_VALUE = 501; + /** + *
+     *
+     * Indicates whether the Flight SQL Server supports CREATE and DROP of tables.
+     *
+     * Returns:
+     * - false: if it doesn't support CREATE and DROP of tables.
+     * - true: if it supports CREATE and DROP of tables.
+     * 
+ * + * SQL_DDL_TABLE = 502; + */ + public static final int SQL_DDL_TABLE_VALUE = 502; + /** + *
+     *
+     * Retrieves a int32 ordinal representing the case sensitivity of catalog, table, schema and table names.
+     *
+     * The possible values are listed in `arrow.flight.protocol.sql.SqlSupportedCaseSensitivity`.
+     * 
+ * + * SQL_IDENTIFIER_CASE = 503; + */ + public static final int SQL_IDENTIFIER_CASE_VALUE = 503; + /** + *
+     * Retrieves a UTF-8 string with the supported character(s) used to surround a delimited identifier.
+     * 
+ * + * SQL_IDENTIFIER_QUOTE_CHAR = 504; + */ + public static final int SQL_IDENTIFIER_QUOTE_CHAR_VALUE = 504; + /** + *
+     *
+     * Retrieves a int32 describing the case sensitivity of quoted identifiers.
+     *
+     * The possible values are listed in `arrow.flight.protocol.sql.SqlSupportedCaseSensitivity`.
+     * 
+ * + * SQL_QUOTED_IDENTIFIER_CASE = 505; + */ + public static final int SQL_QUOTED_IDENTIFIER_CASE_VALUE = 505; + /** + *
+     *
+     * Retrieves a boolean value indicating whether all tables are selectable.
+     *
+     * Returns:
+     * - false: if not all tables are selectable or if none are;
+     * - true: if all tables are selectable.
+     * 
+ * + * SQL_ALL_TABLES_ARE_SELECTABLE = 506; + */ + public static final int SQL_ALL_TABLES_ARE_SELECTABLE_VALUE = 506; + /** + *
+     *
+     * Retrieves the null ordering.
+     *
+     * Returns a int32 ordinal for the null ordering being used, as described in
+     * `arrow.flight.protocol.sql.SqlNullOrdering`.
+     * 
+ * + * SQL_NULL_ORDERING = 507; + */ + public static final int SQL_NULL_ORDERING_VALUE = 507; + /** + *
+     * Retrieves a UTF-8 string list with values of the supported keywords.
+     * 
+ * + * SQL_KEYWORDS = 508; + */ + public static final int SQL_KEYWORDS_VALUE = 508; + /** + *
+     * Retrieves a UTF-8 string list with values of the supported numeric functions.
+     * 
+ * + * SQL_NUMERIC_FUNCTIONS = 509; + */ + public static final int SQL_NUMERIC_FUNCTIONS_VALUE = 509; + /** + *
+     * Retrieves a UTF-8 string list with values of the supported string functions.
+     * 
+ * + * SQL_STRING_FUNCTIONS = 510; + */ + public static final int SQL_STRING_FUNCTIONS_VALUE = 510; + /** + *
+     * Retrieves a UTF-8 string list with values of the supported system functions.
+     * 
+ * + * SQL_SYSTEM_FUNCTIONS = 511; + */ + public static final int SQL_SYSTEM_FUNCTIONS_VALUE = 511; + /** + *
+     * Retrieves a UTF-8 string list with values of the supported datetime functions.
+     * 
+ * + * SQL_DATETIME_FUNCTIONS = 512; + */ + public static final int SQL_DATETIME_FUNCTIONS_VALUE = 512; + /** + *
+     *
+     * Retrieves the UTF-8 string that can be used to escape wildcard characters.
+     * This is the string that can be used to escape '_' or '%' in the catalog search parameters that are a pattern
+     * (and therefore use one of the wildcard characters).
+     * The '_' character represents any single character; the '%' character represents any sequence of zero or more
+     * characters.
+     * 
+ * + * SQL_SEARCH_STRING_ESCAPE = 513; + */ + public static final int SQL_SEARCH_STRING_ESCAPE_VALUE = 513; + /** + *
+     *
+     * Retrieves a UTF-8 string with all the "extra" characters that can be used in unquoted identifier names
+     * (those beyond a-z, A-Z, 0-9 and _).
+     * 
+ * + * SQL_EXTRA_NAME_CHARACTERS = 514; + */ + public static final int SQL_EXTRA_NAME_CHARACTERS_VALUE = 514; + /** + *
+     *
+     * Retrieves a boolean value indicating whether column aliasing is supported.
+     * If so, the SQL AS clause can be used to provide names for computed columns or to provide alias names for columns
+     * as required.
+     *
+     * Returns:
+     * - false: if column aliasing is unsupported;
+     * - true: if column aliasing is supported.
+     * 
+ * + * SQL_SUPPORTS_COLUMN_ALIASING = 515; + */ + public static final int SQL_SUPPORTS_COLUMN_ALIASING_VALUE = 515; + /** + *
+     *
+     * Retrieves a boolean value indicating whether concatenations between null and non-null values being
+     * null are supported.
+     *
+     * - Returns:
+     * - false: if concatenations between null and non-null values being null are unsupported;
+     * - true: if concatenations between null and non-null values being null are supported.
+     * 
+ * + * SQL_NULL_PLUS_NULL_IS_NULL = 516; + */ + public static final int SQL_NULL_PLUS_NULL_IS_NULL_VALUE = 516; + /** + *
+     *
+     * Retrieves a map where the key is the type to convert from and the value is a list with the types to convert to,
+     * indicating the supported conversions. Each key and each item on the list value is a value to a predefined type on
+     * SqlSupportsConvert enum.
+     * The returned map will be:  map<int32, list<int32>>
+     * 
+ * + * SQL_SUPPORTS_CONVERT = 517; + */ + public static final int SQL_SUPPORTS_CONVERT_VALUE = 517; + /** + *
+     *
+     * Retrieves a boolean value indicating whether, when table correlation names are supported,
+     * they are restricted to being different from the names of the tables.
+     *
+     * Returns:
+     * - false: if table correlation names are unsupported;
+     * - true: if table correlation names are supported.
+     * 
+ * + * SQL_SUPPORTS_TABLE_CORRELATION_NAMES = 518; + */ + public static final int SQL_SUPPORTS_TABLE_CORRELATION_NAMES_VALUE = 518; + /** + *
+     *
+     * Retrieves a boolean value indicating whether, when table correlation names are supported,
+     * they are restricted to being different from the names of the tables.
+     *
+     * Returns:
+     * - false: if different table correlation names are unsupported;
+     * - true: if different table correlation names are supported
+     * 
+ * + * SQL_SUPPORTS_DIFFERENT_TABLE_CORRELATION_NAMES = 519; + */ + public static final int SQL_SUPPORTS_DIFFERENT_TABLE_CORRELATION_NAMES_VALUE = 519; + /** + *
+     *
+     * Retrieves a boolean value indicating whether expressions in ORDER BY lists are supported.
+     *
+     * Returns:
+     * - false: if expressions in ORDER BY are unsupported;
+     * - true: if expressions in ORDER BY are supported;
+     * 
+ * + * SQL_SUPPORTS_EXPRESSIONS_IN_ORDER_BY = 520; + */ + public static final int SQL_SUPPORTS_EXPRESSIONS_IN_ORDER_BY_VALUE = 520; + /** + *
+     *
+     * Retrieves a boolean value indicating whether using a column that is not in the SELECT statement in a GROUP BY
+     * clause is supported.
+     *
+     * Returns:
+     * - false: if using a column that is not in the SELECT statement in a GROUP BY clause is unsupported;
+     * - true: if using a column that is not in the SELECT statement in a GROUP BY clause is supported.
+     * 
+ * + * SQL_SUPPORTS_ORDER_BY_UNRELATED = 521; + */ + public static final int SQL_SUPPORTS_ORDER_BY_UNRELATED_VALUE = 521; + /** + *
+     *
+     * Retrieves the supported GROUP BY commands;
+     *
+     * Returns an int32 bitmask value representing the supported commands.
+     * The returned bitmask should be parsed in order to retrieve the supported commands.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (GROUP BY is unsupported);
+     * - return 1 (\b1)   => [SQL_GROUP_BY_UNRELATED];
+     * - return 2 (\b10)  => [SQL_GROUP_BY_BEYOND_SELECT];
+     * - return 3 (\b11)  => [SQL_GROUP_BY_UNRELATED, SQL_GROUP_BY_BEYOND_SELECT].
+     * Valid GROUP BY types are described under `arrow.flight.protocol.sql.SqlSupportedGroupBy`.
+     * 
+ * + * SQL_SUPPORTED_GROUP_BY = 522; + */ + public static final int SQL_SUPPORTED_GROUP_BY_VALUE = 522; + /** + *
+     *
+     * Retrieves a boolean value indicating whether specifying a LIKE escape clause is supported.
+     *
+     * Returns:
+     * - false: if specifying a LIKE escape clause is unsupported;
+     * - true: if specifying a LIKE escape clause is supported.
+     * 
+ * + * SQL_SUPPORTS_LIKE_ESCAPE_CLAUSE = 523; + */ + public static final int SQL_SUPPORTS_LIKE_ESCAPE_CLAUSE_VALUE = 523; + /** + *
+     *
+     * Retrieves a boolean value indicating whether columns may be defined as non-nullable.
+     *
+     * Returns:
+     * - false: if columns cannot be defined as non-nullable;
+     * - true: if columns may be defined as non-nullable.
+     * 
+ * + * SQL_SUPPORTS_NON_NULLABLE_COLUMNS = 524; + */ + public static final int SQL_SUPPORTS_NON_NULLABLE_COLUMNS_VALUE = 524; + /** + *
+     *
+     * Retrieves the supported SQL grammar level as per the ODBC specification.
+     *
+     * Returns an int32 bitmask value representing the supported SQL grammar level.
+     * The returned bitmask should be parsed in order to retrieve the supported grammar levels.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (SQL grammar is unsupported);
+     * - return 1 (\b1)   => [SQL_MINIMUM_GRAMMAR];
+     * - return 2 (\b10)  => [SQL_CORE_GRAMMAR];
+     * - return 3 (\b11)  => [SQL_MINIMUM_GRAMMAR, SQL_CORE_GRAMMAR];
+     * - return 4 (\b100) => [SQL_EXTENDED_GRAMMAR];
+     * - return 5 (\b101) => [SQL_MINIMUM_GRAMMAR, SQL_EXTENDED_GRAMMAR];
+     * - return 6 (\b110) => [SQL_CORE_GRAMMAR, SQL_EXTENDED_GRAMMAR];
+     * - return 7 (\b111) => [SQL_MINIMUM_GRAMMAR, SQL_CORE_GRAMMAR, SQL_EXTENDED_GRAMMAR].
+     * Valid SQL grammar levels are described under `arrow.flight.protocol.sql.SupportedSqlGrammar`.
+     * 
+ * + * SQL_SUPPORTED_GRAMMAR = 525; + */ + public static final int SQL_SUPPORTED_GRAMMAR_VALUE = 525; + /** + *
+     *
+     * Retrieves the supported ANSI92 SQL grammar level.
+     *
+     * Returns an int32 bitmask value representing the supported ANSI92 SQL grammar level.
+     * The returned bitmask should be parsed in order to retrieve the supported commands.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (ANSI92 SQL grammar is unsupported);
+     * - return 1 (\b1)   => [ANSI92_ENTRY_SQL];
+     * - return 2 (\b10)  => [ANSI92_INTERMEDIATE_SQL];
+     * - return 3 (\b11)  => [ANSI92_ENTRY_SQL, ANSI92_INTERMEDIATE_SQL];
+     * - return 4 (\b100) => [ANSI92_FULL_SQL];
+     * - return 5 (\b101) => [ANSI92_ENTRY_SQL, ANSI92_FULL_SQL];
+     * - return 6 (\b110) => [ANSI92_INTERMEDIATE_SQL, ANSI92_FULL_SQL];
+     * - return 7 (\b111) => [ANSI92_ENTRY_SQL, ANSI92_INTERMEDIATE_SQL, ANSI92_FULL_SQL].
+     * Valid ANSI92 SQL grammar levels are described under `arrow.flight.protocol.sql.SupportedAnsi92SqlGrammarLevel`.
+     * 
+ * + * SQL_ANSI92_SUPPORTED_LEVEL = 526; + */ + public static final int SQL_ANSI92_SUPPORTED_LEVEL_VALUE = 526; + /** + *
+     *
+     * Retrieves a boolean value indicating whether the SQL Integrity Enhancement Facility is supported.
+     *
+     * Returns:
+     * - false: if the SQL Integrity Enhancement Facility is unsupported;
+     * - true: if the SQL Integrity Enhancement Facility is supported.
+     * 
+ * + * SQL_SUPPORTS_INTEGRITY_ENHANCEMENT_FACILITY = 527; + */ + public static final int SQL_SUPPORTS_INTEGRITY_ENHANCEMENT_FACILITY_VALUE = 527; + /** + *
+     *
+     * Retrieves the support level for SQL OUTER JOINs.
+     *
+     * Returns a int32 ordinal for the SQL ordering being used, as described in
+     * `arrow.flight.protocol.sql.SqlOuterJoinsSupportLevel`.
+     * 
+ * + * SQL_OUTER_JOINS_SUPPORT_LEVEL = 528; + */ + public static final int SQL_OUTER_JOINS_SUPPORT_LEVEL_VALUE = 528; + /** + *
+     * Retrieves a UTF-8 string with the preferred term for "schema".
+     * 
+ * + * SQL_SCHEMA_TERM = 529; + */ + public static final int SQL_SCHEMA_TERM_VALUE = 529; + /** + *
+     * Retrieves a UTF-8 string with the preferred term for "procedure".
+     * 
+ * + * SQL_PROCEDURE_TERM = 530; + */ + public static final int SQL_PROCEDURE_TERM_VALUE = 530; + /** + *
+     *
+     * Retrieves a UTF-8 string with the preferred term for "catalog".
+     * If an empty string is returned, it is assumed that the server does NOT support catalogs.
+     * 
+ * + * SQL_CATALOG_TERM = 531; + */ + public static final int SQL_CATALOG_TERM_VALUE = 531; + /** + *
+     *
+     * Retrieves a boolean value indicating whether a catalog appears at the start of a fully qualified table name.
+     *
+     * - false: if a catalog does not appear at the start of a fully qualified table name;
+     * - true: if a catalog appears at the start of a fully qualified table name.
+     * 
+ * + * SQL_CATALOG_AT_START = 532; + */ + public static final int SQL_CATALOG_AT_START_VALUE = 532; + /** + *
+     *
+     * Retrieves the supported actions for a SQL schema.
+     *
+     * Returns an int32 bitmask value representing the supported actions for a SQL schema.
+     * The returned bitmask should be parsed in order to retrieve the supported actions for a SQL schema.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported actions for SQL schema);
+     * - return 1 (\b1)   => [SQL_ELEMENT_IN_PROCEDURE_CALLS];
+     * - return 2 (\b10)  => [SQL_ELEMENT_IN_INDEX_DEFINITIONS];
+     * - return 3 (\b11)  => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS];
+     * - return 4 (\b100) => [SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 5 (\b101) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 6 (\b110) => [SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 7 (\b111) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS].
+     * Valid actions for a SQL schema described under `arrow.flight.protocol.sql.SqlSupportedElementActions`.
+     * 
+ * + * SQL_SCHEMAS_SUPPORTED_ACTIONS = 533; + */ + public static final int SQL_SCHEMAS_SUPPORTED_ACTIONS_VALUE = 533; + /** + *
+     *
+     * Retrieves the supported actions for a SQL catalog.
+     *
+     * Returns an int32 bitmask value representing the supported actions for a SQL catalog.
+     * The returned bitmask should be parsed in order to retrieve the supported actions for a SQL catalog.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported actions for SQL catalog);
+     * - return 1 (\b1)   => [SQL_ELEMENT_IN_PROCEDURE_CALLS];
+     * - return 2 (\b10)  => [SQL_ELEMENT_IN_INDEX_DEFINITIONS];
+     * - return 3 (\b11)  => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS];
+     * - return 4 (\b100) => [SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 5 (\b101) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 6 (\b110) => [SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
+     * - return 7 (\b111) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS].
+     * Valid actions for a SQL catalog are described under `arrow.flight.protocol.sql.SqlSupportedElementActions`.
+     * 
+ * + * SQL_CATALOGS_SUPPORTED_ACTIONS = 534; + */ + public static final int SQL_CATALOGS_SUPPORTED_ACTIONS_VALUE = 534; + /** + *
+     *
+     * Retrieves the supported SQL positioned commands.
+     *
+     * Returns an int32 bitmask value representing the supported SQL positioned commands.
+     * The returned bitmask should be parsed in order to retrieve the supported SQL positioned commands.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported SQL positioned commands);
+     * - return 1 (\b1)   => [SQL_POSITIONED_DELETE];
+     * - return 2 (\b10)  => [SQL_POSITIONED_UPDATE];
+     * - return 3 (\b11)  => [SQL_POSITIONED_DELETE, SQL_POSITIONED_UPDATE].
+     * Valid SQL positioned commands are described under `arrow.flight.protocol.sql.SqlSupportedPositionedCommands`.
+     * 
+ * + * SQL_SUPPORTED_POSITIONED_COMMANDS = 535; + */ + public static final int SQL_SUPPORTED_POSITIONED_COMMANDS_VALUE = 535; + /** + *
+     *
+     * Retrieves a boolean value indicating whether SELECT FOR UPDATE statements are supported.
+     *
+     * Returns:
+     * - false: if SELECT FOR UPDATE statements are unsupported;
+     * - true: if SELECT FOR UPDATE statements are supported.
+     * 
+ * + * SQL_SELECT_FOR_UPDATE_SUPPORTED = 536; + */ + public static final int SQL_SELECT_FOR_UPDATE_SUPPORTED_VALUE = 536; + /** + *
+     *
+     * Retrieves a boolean value indicating whether stored procedure calls that use the stored procedure escape syntax
+     * are supported.
+     *
+     * Returns:
+     * - false: if stored procedure calls that use the stored procedure escape syntax are unsupported;
+     * - true: if stored procedure calls that use the stored procedure escape syntax are supported.
+     * 
+ * + * SQL_STORED_PROCEDURES_SUPPORTED = 537; + */ + public static final int SQL_STORED_PROCEDURES_SUPPORTED_VALUE = 537; + /** + *
+     *
+     * Retrieves the supported SQL subqueries.
+     *
+     * Returns an int32 bitmask value representing the supported SQL subqueries.
+     * The returned bitmask should be parsed in order to retrieve the supported SQL subqueries.
+     *
+     * For instance:
+     * - return 0   (\b0)     => [] (no supported SQL subqueries);
+     * - return 1   (\b1)     => [SQL_SUBQUERIES_IN_COMPARISONS];
+     * - return 2   (\b10)    => [SQL_SUBQUERIES_IN_EXISTS];
+     * - return 3   (\b11)    => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS];
+     * - return 4   (\b100)   => [SQL_SUBQUERIES_IN_INS];
+     * - return 5   (\b101)   => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_INS];
+     * - return 6   (\b110)   => [SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_EXISTS];
+     * - return 7   (\b111)   => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS];
+     * - return 8   (\b1000)  => [SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 9   (\b1001)  => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 10  (\b1010)  => [SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 11  (\b1011)  => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 12  (\b1100)  => [SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 13  (\b1101)  => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 14  (\b1110)  => [SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - return 15  (\b1111)  => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
+     * - ...
+     * Valid SQL subqueries are described under `arrow.flight.protocol.sql.SqlSupportedSubqueries`.
+     * 
+ * + * SQL_SUPPORTED_SUBQUERIES = 538; + */ + public static final int SQL_SUPPORTED_SUBQUERIES_VALUE = 538; + /** + *
+     *
+     * Retrieves a boolean value indicating whether correlated subqueries are supported.
+     *
+     * Returns:
+     * - false: if correlated subqueries are unsupported;
+     * - true: if correlated subqueries are supported.
+     * 
+ * + * SQL_CORRELATED_SUBQUERIES_SUPPORTED = 539; + */ + public static final int SQL_CORRELATED_SUBQUERIES_SUPPORTED_VALUE = 539; + /** + *
+     *
+     * Retrieves the supported SQL UNIONs.
+     *
+     * Returns an int32 bitmask value representing the supported SQL UNIONs.
+     * The returned bitmask should be parsed in order to retrieve the supported SQL UNIONs.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported SQL unions);
+     * - return 1 (\b1)   => [SQL_UNION];
+     * - return 2 (\b10)  => [SQL_UNION_ALL];
+     * - return 3 (\b11)  => [SQL_UNION, SQL_UNION_ALL].
+     * Valid SQL unions are described under `arrow.flight.protocol.sql.SqlSupportedUnions`.
+     * 
+ * + * SQL_SUPPORTED_UNIONS = 540; + */ + public static final int SQL_SUPPORTED_UNIONS_VALUE = 540; + /** + *
+     * Retrieves a int64 value representing the maximum number of hex characters allowed in an inline binary literal.
+     * 
+ * + * SQL_MAX_BINARY_LITERAL_LENGTH = 541; + */ + public static final int SQL_MAX_BINARY_LITERAL_LENGTH_VALUE = 541; + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed for a character literal.
+     * 
+ * + * SQL_MAX_CHAR_LITERAL_LENGTH = 542; + */ + public static final int SQL_MAX_CHAR_LITERAL_LENGTH_VALUE = 542; + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed for a column name.
+     * 
+ * + * SQL_MAX_COLUMN_NAME_LENGTH = 543; + */ + public static final int SQL_MAX_COLUMN_NAME_LENGTH_VALUE = 543; + /** + *
+     * Retrieves a int64 value representing the maximum number of columns allowed in a GROUP BY clause.
+     * 
+ * + * SQL_MAX_COLUMNS_IN_GROUP_BY = 544; + */ + public static final int SQL_MAX_COLUMNS_IN_GROUP_BY_VALUE = 544; + /** + *
+     * Retrieves a int64 value representing the maximum number of columns allowed in an index.
+     * 
+ * + * SQL_MAX_COLUMNS_IN_INDEX = 545; + */ + public static final int SQL_MAX_COLUMNS_IN_INDEX_VALUE = 545; + /** + *
+     * Retrieves a int64 value representing the maximum number of columns allowed in an ORDER BY clause.
+     * 
+ * + * SQL_MAX_COLUMNS_IN_ORDER_BY = 546; + */ + public static final int SQL_MAX_COLUMNS_IN_ORDER_BY_VALUE = 546; + /** + *
+     * Retrieves a int64 value representing the maximum number of columns allowed in a SELECT list.
+     * 
+ * + * SQL_MAX_COLUMNS_IN_SELECT = 547; + */ + public static final int SQL_MAX_COLUMNS_IN_SELECT_VALUE = 547; + /** + *
+     * Retrieves a int64 value representing the maximum number of columns allowed in a table.
+     * 
+ * + * SQL_MAX_COLUMNS_IN_TABLE = 548; + */ + public static final int SQL_MAX_COLUMNS_IN_TABLE_VALUE = 548; + /** + *
+     * Retrieves a int64 value representing the maximum number of concurrent connections possible.
+     * 
+ * + * SQL_MAX_CONNECTIONS = 549; + */ + public static final int SQL_MAX_CONNECTIONS_VALUE = 549; + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a cursor name.
+     * 
+ * + * SQL_MAX_CURSOR_NAME_LENGTH = 550; + */ + public static final int SQL_MAX_CURSOR_NAME_LENGTH_VALUE = 550; + /** + *
+     *
+     * Retrieves a int64 value representing the maximum number of bytes allowed for an index,
+     * including all of the parts of the index.
+     * 
+ * + * SQL_MAX_INDEX_LENGTH = 551; + */ + public static final int SQL_MAX_INDEX_LENGTH_VALUE = 551; + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a schema name.
+     * 
+ * + * SQL_DB_SCHEMA_NAME_LENGTH = 552; + */ + public static final int SQL_DB_SCHEMA_NAME_LENGTH_VALUE = 552; + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a procedure name.
+     * 
+ * + * SQL_MAX_PROCEDURE_NAME_LENGTH = 553; + */ + public static final int SQL_MAX_PROCEDURE_NAME_LENGTH_VALUE = 553; + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a catalog name.
+     * 
+ * + * SQL_MAX_CATALOG_NAME_LENGTH = 554; + */ + public static final int SQL_MAX_CATALOG_NAME_LENGTH_VALUE = 554; + /** + *
+     * Retrieves a int64 value representing the maximum number of bytes allowed in a single row.
+     * 
+ * + * SQL_MAX_ROW_SIZE = 555; + */ + public static final int SQL_MAX_ROW_SIZE_VALUE = 555; + /** + *
+     *
+     * Retrieves a boolean indicating whether the return value for the JDBC method getMaxRowSize includes the SQL
+     * data types LONGVARCHAR and LONGVARBINARY.
+     *
+     * Returns:
+     * - false: if return value for the JDBC method getMaxRowSize does
+     *          not include the SQL data types LONGVARCHAR and LONGVARBINARY;
+     * - true: if return value for the JDBC method getMaxRowSize includes
+     *         the SQL data types LONGVARCHAR and LONGVARBINARY.
+     * 
+ * + * SQL_MAX_ROW_SIZE_INCLUDES_BLOBS = 556; + */ + public static final int SQL_MAX_ROW_SIZE_INCLUDES_BLOBS_VALUE = 556; + /** + *
+     *
+     * Retrieves a int64 value representing the maximum number of characters allowed for an SQL statement;
+     * a result of 0 (zero) means that there is no limit or the limit is not known.
+     * 
+ * + * SQL_MAX_STATEMENT_LENGTH = 557; + */ + public static final int SQL_MAX_STATEMENT_LENGTH_VALUE = 557; + /** + *
+     * Retrieves a int64 value representing the maximum number of active statements that can be open at the same time.
+     * 
+ * + * SQL_MAX_STATEMENTS = 558; + */ + public static final int SQL_MAX_STATEMENTS_VALUE = 558; + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a table name.
+     * 
+ * + * SQL_MAX_TABLE_NAME_LENGTH = 559; + */ + public static final int SQL_MAX_TABLE_NAME_LENGTH_VALUE = 559; + /** + *
+     * Retrieves a int64 value representing the maximum number of tables allowed in a SELECT statement.
+     * 
+ * + * SQL_MAX_TABLES_IN_SELECT = 560; + */ + public static final int SQL_MAX_TABLES_IN_SELECT_VALUE = 560; + /** + *
+     * Retrieves a int64 value representing the maximum number of characters allowed in a user name.
+     * 
+ * + * SQL_MAX_USERNAME_LENGTH = 561; + */ + public static final int SQL_MAX_USERNAME_LENGTH_VALUE = 561; + /** + *
+     *
+     * Retrieves this database's default transaction isolation level as described in
+     * `arrow.flight.protocol.sql.SqlTransactionIsolationLevel`.
+     *
+     * Returns a int32 ordinal for the SQL transaction isolation level.
+     * 
+ * + * SQL_DEFAULT_TRANSACTION_ISOLATION = 562; + */ + public static final int SQL_DEFAULT_TRANSACTION_ISOLATION_VALUE = 562; + /** + *
+     *
+     * Retrieves a boolean value indicating whether transactions are supported. If not, invoking the method commit is a
+     * noop, and the isolation level is `arrow.flight.protocol.sql.SqlTransactionIsolationLevel.TRANSACTION_NONE`.
+     *
+     * Returns:
+     * - false: if transactions are unsupported;
+     * - true: if transactions are supported.
+     * 
+ * + * SQL_TRANSACTIONS_SUPPORTED = 563; + */ + public static final int SQL_TRANSACTIONS_SUPPORTED_VALUE = 563; + /** + *
+     *
+     * Retrieves the supported transactions isolation levels.
+     *
+     * Returns an int32 bitmask value representing the supported transactions isolation levels.
+     * The returned bitmask should be parsed in order to retrieve the supported transactions isolation levels.
+     *
+     * For instance:
+     * - return 0   (\b0)     => [] (no supported SQL transactions isolation levels);
+     * - return 1   (\b1)     => [SQL_TRANSACTION_NONE];
+     * - return 2   (\b10)    => [SQL_TRANSACTION_READ_UNCOMMITTED];
+     * - return 3   (\b11)    => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED];
+     * - return 4   (\b100)   => [SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 5   (\b101)   => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 6   (\b110)   => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 7   (\b111)   => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 8   (\b1000)  => [SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 9   (\b1001)  => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 10  (\b1010)  => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 11  (\b1011)  => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 12  (\b1100)  => [SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 13  (\b1101)  => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 14  (\b1110)  => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 15  (\b1111)  => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_READ_COMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
+     * - return 16  (\b10000) => [SQL_TRANSACTION_SERIALIZABLE];
+     * - ...
+     * Valid SQL transactions isolation levels are described under `arrow.flight.protocol.sql.SqlTransactionIsolationLevel`.
+     * 
+ * + * SQL_SUPPORTED_TRANSACTIONS_ISOLATION_LEVELS = 564; + */ + public static final int SQL_SUPPORTED_TRANSACTIONS_ISOLATION_LEVELS_VALUE = 564; + /** + *
+     *
+     * Retrieves a boolean value indicating whether a data definition statement within a transaction forces
+     * the transaction to commit.
+     *
+     * Returns:
+     * - false: if a data definition statement within a transaction does not force the transaction to commit;
+     * - true: if a data definition statement within a transaction forces the transaction to commit.
+     * 
+ * + * SQL_DATA_DEFINITION_CAUSES_TRANSACTION_COMMIT = 565; + */ + public static final int SQL_DATA_DEFINITION_CAUSES_TRANSACTION_COMMIT_VALUE = 565; + /** + *
+     *
+     * Retrieves a boolean value indicating whether a data definition statement within a transaction is ignored.
+     *
+     * Returns:
+     * - false: if a data definition statement within a transaction is taken into account;
+     * - true: a data definition statement within a transaction is ignored.
+     * 
+ * + * SQL_DATA_DEFINITIONS_IN_TRANSACTIONS_IGNORED = 566; + */ + public static final int SQL_DATA_DEFINITIONS_IN_TRANSACTIONS_IGNORED_VALUE = 566; + /** + *
+     *
+     * Retrieves an int32 bitmask value representing the supported result set types.
+     * The returned bitmask should be parsed in order to retrieve the supported result set types.
+     *
+     * For instance:
+     * - return 0   (\b0)     => [] (no supported result set types);
+     * - return 1   (\b1)     => [SQL_RESULT_SET_TYPE_UNSPECIFIED];
+     * - return 2   (\b10)    => [SQL_RESULT_SET_TYPE_FORWARD_ONLY];
+     * - return 3   (\b11)    => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_FORWARD_ONLY];
+     * - return 4   (\b100)   => [SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
+     * - return 5   (\b101)   => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
+     * - return 6   (\b110)   => [SQL_RESULT_SET_TYPE_FORWARD_ONLY, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
+     * - return 7   (\b111)   => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_FORWARD_ONLY, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
+     * - return 8   (\b1000)  => [SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE];
+     * - ...
+     * Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetType`.
+     * 
+ * + * SQL_SUPPORTED_RESULT_SET_TYPES = 567; + */ + public static final int SQL_SUPPORTED_RESULT_SET_TYPES_VALUE = 567; + /** + *
+     *
+     * Returns an int32 bitmask value representing the concurrency types supported for
+     * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_UNSPECIFIED`.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
+     * - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
+     * - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 6 (\b110)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 7 (\b111)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * Valid result set concurrency types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
+     * 
+ * + * SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_UNSPECIFIED = 568; + */ + public static final int SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_UNSPECIFIED_VALUE = 568; + /** + *
+     *
+     * Returns an int32 bitmask value representing the concurrency types supported for
+     * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_FORWARD_ONLY`.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
+     * - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
+     * - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 6 (\b110)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 7 (\b111)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * Valid result set concurrency types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
+     * 
+ * + * SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_FORWARD_ONLY = 569; + */ + public static final int SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_FORWARD_ONLY_VALUE = 569; + /** + *
+     *
+     * Returns an int32 bitmask value representing the concurrency types supported for
+     * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE`.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
+     * - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
+     * - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 6 (\b110)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 7 (\b111)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * Valid result set concurrency types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
+     * 
+ * + * SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_SENSITIVE = 570; + */ + public static final int SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_SENSITIVE_VALUE = 570; + /** + *
+     *
+     * Returns an int32 bitmask value representing the concurrency types supported for
+     * `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE`.
+     *
+     * For instance:
+     * - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
+     * - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
+     * - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
+     * - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 6 (\b110)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * - return 7 (\b111)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
+     * Valid result set concurrency types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
+     * 
+ * + * SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_INSENSITIVE = 571; + */ + public static final int SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_INSENSITIVE_VALUE = 571; + /** + *
+     *
+     * Retrieves a boolean value indicating whether this database supports batch updates.
+     *
+     * - false: if this database does not support batch updates;
+     * - true: if this database supports batch updates.
+     * 
+ * + * SQL_BATCH_UPDATES_SUPPORTED = 572; + */ + public static final int SQL_BATCH_UPDATES_SUPPORTED_VALUE = 572; + /** + *
+     *
+     * Retrieves a boolean value indicating whether this database supports savepoints.
+     *
+     * Returns:
+     * - false: if this database does not support savepoints;
+     * - true: if this database supports savepoints.
+     * 
+ * + * SQL_SAVEPOINTS_SUPPORTED = 573; + */ + public static final int SQL_SAVEPOINTS_SUPPORTED_VALUE = 573; + /** + *
+     *
+     * Retrieves a boolean value indicating whether named parameters are supported in callable statements.
+     *
+     * Returns:
+     * - false: if named parameters in callable statements are unsupported;
+     * - true: if named parameters in callable statements are supported.
+     * 
+ * + * SQL_NAMED_PARAMETERS_SUPPORTED = 574; + */ + public static final int SQL_NAMED_PARAMETERS_SUPPORTED_VALUE = 574; + /** + *
+     *
+     * Retrieves a boolean value indicating whether updates made to a LOB are made on a copy or directly to the LOB.
+     *
+     * Returns:
+     * - false: if updates made to a LOB are made directly to the LOB;
+     * - true: if updates made to a LOB are made on a copy.
+     * 
+ * + * SQL_LOCATORS_UPDATE_COPY = 575; + */ + public static final int SQL_LOCATORS_UPDATE_COPY_VALUE = 575; + /** + *
+     *
+     * Retrieves a boolean value indicating whether invoking user-defined or vendor functions
+     * using the stored procedure escape syntax is supported.
+     *
+     * Returns:
+     * - false: if invoking user-defined or vendor functions using the stored procedure escape syntax is unsupported;
+     * - true: if invoking user-defined or vendor functions using the stored procedure escape syntax is supported.
+     * 
+ * + * SQL_STORED_FUNCTIONS_USING_CALL_SYNTAX_SUPPORTED = 576; + */ + public static final int SQL_STORED_FUNCTIONS_USING_CALL_SYNTAX_SUPPORTED_VALUE = 576; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlInfo valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static SqlInfo forNumber(int value) { + switch (value) { + case 0: return FLIGHT_SQL_SERVER_NAME; + case 1: return FLIGHT_SQL_SERVER_VERSION; + case 2: return FLIGHT_SQL_SERVER_ARROW_VERSION; + case 3: return FLIGHT_SQL_SERVER_READ_ONLY; + case 4: return FLIGHT_SQL_SERVER_SQL; + case 5: return FLIGHT_SQL_SERVER_SUBSTRAIT; + case 6: return FLIGHT_SQL_SERVER_SUBSTRAIT_MIN_VERSION; + case 7: return FLIGHT_SQL_SERVER_SUBSTRAIT_MAX_VERSION; + case 8: return FLIGHT_SQL_SERVER_TRANSACTION; + case 9: return FLIGHT_SQL_SERVER_CANCEL; + case 100: return FLIGHT_SQL_SERVER_STATEMENT_TIMEOUT; + case 101: return FLIGHT_SQL_SERVER_TRANSACTION_TIMEOUT; + case 500: return SQL_DDL_CATALOG; + case 501: return SQL_DDL_SCHEMA; + case 502: return SQL_DDL_TABLE; + case 503: return SQL_IDENTIFIER_CASE; + case 504: return SQL_IDENTIFIER_QUOTE_CHAR; + case 505: return SQL_QUOTED_IDENTIFIER_CASE; + case 506: return SQL_ALL_TABLES_ARE_SELECTABLE; + case 507: return SQL_NULL_ORDERING; + case 508: return SQL_KEYWORDS; + case 509: return SQL_NUMERIC_FUNCTIONS; + case 510: return SQL_STRING_FUNCTIONS; + case 511: return SQL_SYSTEM_FUNCTIONS; + case 512: return SQL_DATETIME_FUNCTIONS; + case 513: 
return SQL_SEARCH_STRING_ESCAPE; + case 514: return SQL_EXTRA_NAME_CHARACTERS; + case 515: return SQL_SUPPORTS_COLUMN_ALIASING; + case 516: return SQL_NULL_PLUS_NULL_IS_NULL; + case 517: return SQL_SUPPORTS_CONVERT; + case 518: return SQL_SUPPORTS_TABLE_CORRELATION_NAMES; + case 519: return SQL_SUPPORTS_DIFFERENT_TABLE_CORRELATION_NAMES; + case 520: return SQL_SUPPORTS_EXPRESSIONS_IN_ORDER_BY; + case 521: return SQL_SUPPORTS_ORDER_BY_UNRELATED; + case 522: return SQL_SUPPORTED_GROUP_BY; + case 523: return SQL_SUPPORTS_LIKE_ESCAPE_CLAUSE; + case 524: return SQL_SUPPORTS_NON_NULLABLE_COLUMNS; + case 525: return SQL_SUPPORTED_GRAMMAR; + case 526: return SQL_ANSI92_SUPPORTED_LEVEL; + case 527: return SQL_SUPPORTS_INTEGRITY_ENHANCEMENT_FACILITY; + case 528: return SQL_OUTER_JOINS_SUPPORT_LEVEL; + case 529: return SQL_SCHEMA_TERM; + case 530: return SQL_PROCEDURE_TERM; + case 531: return SQL_CATALOG_TERM; + case 532: return SQL_CATALOG_AT_START; + case 533: return SQL_SCHEMAS_SUPPORTED_ACTIONS; + case 534: return SQL_CATALOGS_SUPPORTED_ACTIONS; + case 535: return SQL_SUPPORTED_POSITIONED_COMMANDS; + case 536: return SQL_SELECT_FOR_UPDATE_SUPPORTED; + case 537: return SQL_STORED_PROCEDURES_SUPPORTED; + case 538: return SQL_SUPPORTED_SUBQUERIES; + case 539: return SQL_CORRELATED_SUBQUERIES_SUPPORTED; + case 540: return SQL_SUPPORTED_UNIONS; + case 541: return SQL_MAX_BINARY_LITERAL_LENGTH; + case 542: return SQL_MAX_CHAR_LITERAL_LENGTH; + case 543: return SQL_MAX_COLUMN_NAME_LENGTH; + case 544: return SQL_MAX_COLUMNS_IN_GROUP_BY; + case 545: return SQL_MAX_COLUMNS_IN_INDEX; + case 546: return SQL_MAX_COLUMNS_IN_ORDER_BY; + case 547: return SQL_MAX_COLUMNS_IN_SELECT; + case 548: return SQL_MAX_COLUMNS_IN_TABLE; + case 549: return SQL_MAX_CONNECTIONS; + case 550: return SQL_MAX_CURSOR_NAME_LENGTH; + case 551: return SQL_MAX_INDEX_LENGTH; + case 552: return SQL_DB_SCHEMA_NAME_LENGTH; + case 553: return SQL_MAX_PROCEDURE_NAME_LENGTH; + case 554: return 
SQL_MAX_CATALOG_NAME_LENGTH; + case 555: return SQL_MAX_ROW_SIZE; + case 556: return SQL_MAX_ROW_SIZE_INCLUDES_BLOBS; + case 557: return SQL_MAX_STATEMENT_LENGTH; + case 558: return SQL_MAX_STATEMENTS; + case 559: return SQL_MAX_TABLE_NAME_LENGTH; + case 560: return SQL_MAX_TABLES_IN_SELECT; + case 561: return SQL_MAX_USERNAME_LENGTH; + case 562: return SQL_DEFAULT_TRANSACTION_ISOLATION; + case 563: return SQL_TRANSACTIONS_SUPPORTED; + case 564: return SQL_SUPPORTED_TRANSACTIONS_ISOLATION_LEVELS; + case 565: return SQL_DATA_DEFINITION_CAUSES_TRANSACTION_COMMIT; + case 566: return SQL_DATA_DEFINITIONS_IN_TRANSACTIONS_IGNORED; + case 567: return SQL_SUPPORTED_RESULT_SET_TYPES; + case 568: return SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_UNSPECIFIED; + case 569: return SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_FORWARD_ONLY; + case 570: return SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_SENSITIVE; + case 571: return SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_INSENSITIVE; + case 572: return SQL_BATCH_UPDATES_SUPPORTED; + case 573: return SQL_SAVEPOINTS_SUPPORTED; + case 574: return SQL_NAMED_PARAMETERS_SUPPORTED; + case 575: return SQL_LOCATORS_UPDATE_COPY; + case 576: return SQL_STORED_FUNCTIONS_USING_CALL_SYNTAX_SUPPORTED; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlInfo> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlInfo findValueByNumber(int number) { + return SqlInfo.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + 
getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(0); + } + + private static final SqlInfo[] VALUES = values(); + + public static SqlInfo valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlInfo(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlInfo) + } + + /** + *
+   * The level of support for Flight SQL transaction RPCs.
+   * 
+ * + * Protobuf enum {@code arrow.flight.protocol.sql.SqlSupportedTransaction} + */ + public enum SqlSupportedTransaction + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+     * Unknown/not indicated/no support
+     * 
+ * + * SQL_SUPPORTED_TRANSACTION_NONE = 0; + */ + SQL_SUPPORTED_TRANSACTION_NONE(0), + /** + *
+     * Transactions, but not savepoints.
+     * A savepoint is a mark within a transaction that can be individually
+     * rolled back to. Not all databases support savepoints.
+     * 
+ * + * SQL_SUPPORTED_TRANSACTION_TRANSACTION = 1; + */ + SQL_SUPPORTED_TRANSACTION_TRANSACTION(1), + /** + *
+     * Transactions and savepoints
+     * 
+ * + * SQL_SUPPORTED_TRANSACTION_SAVEPOINT = 2; + */ + SQL_SUPPORTED_TRANSACTION_SAVEPOINT(2), + UNRECOGNIZED(-1), + ; + + /** + *
+     * Unknown/not indicated/no support
+     * 
+ * + * SQL_SUPPORTED_TRANSACTION_NONE = 0; + */ + public static final int SQL_SUPPORTED_TRANSACTION_NONE_VALUE = 0; + /** + *
+     * Transactions, but not savepoints.
+     * A savepoint is a mark within a transaction that can be individually
+     * rolled back to. Not all databases support savepoints.
+     * 
+ * + * SQL_SUPPORTED_TRANSACTION_TRANSACTION = 1; + */ + public static final int SQL_SUPPORTED_TRANSACTION_TRANSACTION_VALUE = 1; + /** + *
+     * Transactions and savepoints
+     * 
+ * + * SQL_SUPPORTED_TRANSACTION_SAVEPOINT = 2; + */ + public static final int SQL_SUPPORTED_TRANSACTION_SAVEPOINT_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlSupportedTransaction valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static SqlSupportedTransaction forNumber(int value) { + switch (value) { + case 0: return SQL_SUPPORTED_TRANSACTION_NONE; + case 1: return SQL_SUPPORTED_TRANSACTION_TRANSACTION; + case 2: return SQL_SUPPORTED_TRANSACTION_SAVEPOINT; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlSupportedTransaction> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlSupportedTransaction findValueByNumber(int number) { + return SqlSupportedTransaction.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return 
org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(1); + } + + private static final SqlSupportedTransaction[] VALUES = values(); + + public static SqlSupportedTransaction valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlSupportedTransaction(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlSupportedTransaction) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SqlSupportedCaseSensitivity} + */ + public enum SqlSupportedCaseSensitivity + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_CASE_SENSITIVITY_UNKNOWN = 0; + */ + SQL_CASE_SENSITIVITY_UNKNOWN(0), + /** + * SQL_CASE_SENSITIVITY_CASE_INSENSITIVE = 1; + */ + SQL_CASE_SENSITIVITY_CASE_INSENSITIVE(1), + /** + * SQL_CASE_SENSITIVITY_UPPERCASE = 2; + */ + SQL_CASE_SENSITIVITY_UPPERCASE(2), + /** + * SQL_CASE_SENSITIVITY_LOWERCASE = 3; + */ + SQL_CASE_SENSITIVITY_LOWERCASE(3), + UNRECOGNIZED(-1), + ; + + /** + * SQL_CASE_SENSITIVITY_UNKNOWN = 0; + */ + public static final int SQL_CASE_SENSITIVITY_UNKNOWN_VALUE = 0; + /** + * SQL_CASE_SENSITIVITY_CASE_INSENSITIVE = 1; + */ + public static final int SQL_CASE_SENSITIVITY_CASE_INSENSITIVE_VALUE = 1; + /** + * SQL_CASE_SENSITIVITY_UPPERCASE = 2; + */ + public static final int SQL_CASE_SENSITIVITY_UPPERCASE_VALUE = 2; + /** + * SQL_CASE_SENSITIVITY_LOWERCASE = 3; + */ + public static final int SQL_CASE_SENSITIVITY_LOWERCASE_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The 
numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlSupportedCaseSensitivity valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static SqlSupportedCaseSensitivity forNumber(int value) { + switch (value) { + case 0: return SQL_CASE_SENSITIVITY_UNKNOWN; + case 1: return SQL_CASE_SENSITIVITY_CASE_INSENSITIVE; + case 2: return SQL_CASE_SENSITIVITY_UPPERCASE; + case 3: return SQL_CASE_SENSITIVITY_LOWERCASE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlSupportedCaseSensitivity> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlSupportedCaseSensitivity findValueByNumber(int number) { + return SqlSupportedCaseSensitivity.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(2); + } + + private static final SqlSupportedCaseSensitivity[] VALUES = values(); + + public static SqlSupportedCaseSensitivity valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != 
getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlSupportedCaseSensitivity(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlSupportedCaseSensitivity) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SqlNullOrdering} + */ + public enum SqlNullOrdering + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_NULLS_SORTED_HIGH = 0; + */ + SQL_NULLS_SORTED_HIGH(0), + /** + * SQL_NULLS_SORTED_LOW = 1; + */ + SQL_NULLS_SORTED_LOW(1), + /** + * SQL_NULLS_SORTED_AT_START = 2; + */ + SQL_NULLS_SORTED_AT_START(2), + /** + * SQL_NULLS_SORTED_AT_END = 3; + */ + SQL_NULLS_SORTED_AT_END(3), + UNRECOGNIZED(-1), + ; + + /** + * SQL_NULLS_SORTED_HIGH = 0; + */ + public static final int SQL_NULLS_SORTED_HIGH_VALUE = 0; + /** + * SQL_NULLS_SORTED_LOW = 1; + */ + public static final int SQL_NULLS_SORTED_LOW_VALUE = 1; + /** + * SQL_NULLS_SORTED_AT_START = 2; + */ + public static final int SQL_NULLS_SORTED_AT_START_VALUE = 2; + /** + * SQL_NULLS_SORTED_AT_END = 3; + */ + public static final int SQL_NULLS_SORTED_AT_END_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlNullOrdering valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SqlNullOrdering forNumber(int value) { + switch (value) { + case 0: return SQL_NULLS_SORTED_HIGH; + case 1: return SQL_NULLS_SORTED_LOW; + case 2: return SQL_NULLS_SORTED_AT_START; + case 3: return SQL_NULLS_SORTED_AT_END; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlNullOrdering> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlNullOrdering findValueByNumber(int number) { + return SqlNullOrdering.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(3); + } + + private static final SqlNullOrdering[] VALUES = values(); + + public static SqlNullOrdering valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlNullOrdering(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlNullOrdering) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SupportedSqlGrammar} + */ + public enum SupportedSqlGrammar + implements 
com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_MINIMUM_GRAMMAR = 0; + */ + SQL_MINIMUM_GRAMMAR(0), + /** + * SQL_CORE_GRAMMAR = 1; + */ + SQL_CORE_GRAMMAR(1), + /** + * SQL_EXTENDED_GRAMMAR = 2; + */ + SQL_EXTENDED_GRAMMAR(2), + UNRECOGNIZED(-1), + ; + + /** + * SQL_MINIMUM_GRAMMAR = 0; + */ + public static final int SQL_MINIMUM_GRAMMAR_VALUE = 0; + /** + * SQL_CORE_GRAMMAR = 1; + */ + public static final int SQL_CORE_GRAMMAR_VALUE = 1; + /** + * SQL_EXTENDED_GRAMMAR = 2; + */ + public static final int SQL_EXTENDED_GRAMMAR_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SupportedSqlGrammar valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SupportedSqlGrammar forNumber(int value) { + switch (value) { + case 0: return SQL_MINIMUM_GRAMMAR; + case 1: return SQL_CORE_GRAMMAR; + case 2: return SQL_EXTENDED_GRAMMAR; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SupportedSqlGrammar> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SupportedSqlGrammar findValueByNumber(int number) { + return SupportedSqlGrammar.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(4); + } + + private static final SupportedSqlGrammar[] VALUES = values(); + + public static SupportedSqlGrammar valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SupportedSqlGrammar(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SupportedSqlGrammar) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SupportedAnsi92SqlGrammarLevel} + */ + public enum SupportedAnsi92SqlGrammarLevel + implements 
com.google.protobuf.ProtocolMessageEnum { + /** + * ANSI92_ENTRY_SQL = 0; + */ + ANSI92_ENTRY_SQL(0), + /** + * ANSI92_INTERMEDIATE_SQL = 1; + */ + ANSI92_INTERMEDIATE_SQL(1), + /** + * ANSI92_FULL_SQL = 2; + */ + ANSI92_FULL_SQL(2), + UNRECOGNIZED(-1), + ; + + /** + * ANSI92_ENTRY_SQL = 0; + */ + public static final int ANSI92_ENTRY_SQL_VALUE = 0; + /** + * ANSI92_INTERMEDIATE_SQL = 1; + */ + public static final int ANSI92_INTERMEDIATE_SQL_VALUE = 1; + /** + * ANSI92_FULL_SQL = 2; + */ + public static final int ANSI92_FULL_SQL_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SupportedAnsi92SqlGrammarLevel valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SupportedAnsi92SqlGrammarLevel forNumber(int value) { + switch (value) { + case 0: return ANSI92_ENTRY_SQL; + case 1: return ANSI92_INTERMEDIATE_SQL; + case 2: return ANSI92_FULL_SQL; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SupportedAnsi92SqlGrammarLevel> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SupportedAnsi92SqlGrammarLevel findValueByNumber(int number) { + return SupportedAnsi92SqlGrammarLevel.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(5); + } + + private static final SupportedAnsi92SqlGrammarLevel[] VALUES = values(); + + public static SupportedAnsi92SqlGrammarLevel valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SupportedAnsi92SqlGrammarLevel(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SupportedAnsi92SqlGrammarLevel) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SqlOuterJoinsSupportLevel} + */ 
+ public enum SqlOuterJoinsSupportLevel + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_JOINS_UNSUPPORTED = 0; + */ + SQL_JOINS_UNSUPPORTED(0), + /** + * SQL_LIMITED_OUTER_JOINS = 1; + */ + SQL_LIMITED_OUTER_JOINS(1), + /** + * SQL_FULL_OUTER_JOINS = 2; + */ + SQL_FULL_OUTER_JOINS(2), + UNRECOGNIZED(-1), + ; + + /** + * SQL_JOINS_UNSUPPORTED = 0; + */ + public static final int SQL_JOINS_UNSUPPORTED_VALUE = 0; + /** + * SQL_LIMITED_OUTER_JOINS = 1; + */ + public static final int SQL_LIMITED_OUTER_JOINS_VALUE = 1; + /** + * SQL_FULL_OUTER_JOINS = 2; + */ + public static final int SQL_FULL_OUTER_JOINS_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlOuterJoinsSupportLevel valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SqlOuterJoinsSupportLevel forNumber(int value) { + switch (value) { + case 0: return SQL_JOINS_UNSUPPORTED; + case 1: return SQL_LIMITED_OUTER_JOINS; + case 2: return SQL_FULL_OUTER_JOINS; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlOuterJoinsSupportLevel> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlOuterJoinsSupportLevel findValueByNumber(int number) { + return SqlOuterJoinsSupportLevel.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(6); + } + + private static final SqlOuterJoinsSupportLevel[] VALUES = values(); + + public static SqlOuterJoinsSupportLevel valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlOuterJoinsSupportLevel(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlOuterJoinsSupportLevel) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SqlSupportedGroupBy} + */ + public enum SqlSupportedGroupBy + 
implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_GROUP_BY_UNRELATED = 0; + */ + SQL_GROUP_BY_UNRELATED(0), + /** + * SQL_GROUP_BY_BEYOND_SELECT = 1; + */ + SQL_GROUP_BY_BEYOND_SELECT(1), + UNRECOGNIZED(-1), + ; + + /** + * SQL_GROUP_BY_UNRELATED = 0; + */ + public static final int SQL_GROUP_BY_UNRELATED_VALUE = 0; + /** + * SQL_GROUP_BY_BEYOND_SELECT = 1; + */ + public static final int SQL_GROUP_BY_BEYOND_SELECT_VALUE = 1; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlSupportedGroupBy valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SqlSupportedGroupBy forNumber(int value) { + switch (value) { + case 0: return SQL_GROUP_BY_UNRELATED; + case 1: return SQL_GROUP_BY_BEYOND_SELECT; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlSupportedGroupBy> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlSupportedGroupBy findValueByNumber(int number) { + return SqlSupportedGroupBy.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(7); + } + + private static final SqlSupportedGroupBy[] VALUES = values(); + + public static SqlSupportedGroupBy valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlSupportedGroupBy(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlSupportedGroupBy) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SqlSupportedElementActions} + */ + public enum SqlSupportedElementActions + implements com.google.protobuf.ProtocolMessageEnum { + /** + * 
SQL_ELEMENT_IN_PROCEDURE_CALLS = 0; + */ + SQL_ELEMENT_IN_PROCEDURE_CALLS(0), + /** + * SQL_ELEMENT_IN_INDEX_DEFINITIONS = 1; + */ + SQL_ELEMENT_IN_INDEX_DEFINITIONS(1), + /** + * SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS = 2; + */ + SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS(2), + UNRECOGNIZED(-1), + ; + + /** + * SQL_ELEMENT_IN_PROCEDURE_CALLS = 0; + */ + public static final int SQL_ELEMENT_IN_PROCEDURE_CALLS_VALUE = 0; + /** + * SQL_ELEMENT_IN_INDEX_DEFINITIONS = 1; + */ + public static final int SQL_ELEMENT_IN_INDEX_DEFINITIONS_VALUE = 1; + /** + * SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS = 2; + */ + public static final int SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlSupportedElementActions valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SqlSupportedElementActions forNumber(int value) { + switch (value) { + case 0: return SQL_ELEMENT_IN_PROCEDURE_CALLS; + case 1: return SQL_ELEMENT_IN_INDEX_DEFINITIONS; + case 2: return SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlSupportedElementActions> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlSupportedElementActions findValueByNumber(int number) { + return SqlSupportedElementActions.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(8); + } + + private static final SqlSupportedElementActions[] VALUES = values(); + + public static SqlSupportedElementActions valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlSupportedElementActions(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlSupportedElementActions) + } + + /** + * Protobuf enum {@code 
arrow.flight.protocol.sql.SqlSupportedPositionedCommands} + */ + public enum SqlSupportedPositionedCommands + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_POSITIONED_DELETE = 0; + */ + SQL_POSITIONED_DELETE(0), + /** + * SQL_POSITIONED_UPDATE = 1; + */ + SQL_POSITIONED_UPDATE(1), + UNRECOGNIZED(-1), + ; + + /** + * SQL_POSITIONED_DELETE = 0; + */ + public static final int SQL_POSITIONED_DELETE_VALUE = 0; + /** + * SQL_POSITIONED_UPDATE = 1; + */ + public static final int SQL_POSITIONED_UPDATE_VALUE = 1; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlSupportedPositionedCommands valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SqlSupportedPositionedCommands forNumber(int value) { + switch (value) { + case 0: return SQL_POSITIONED_DELETE; + case 1: return SQL_POSITIONED_UPDATE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlSupportedPositionedCommands> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlSupportedPositionedCommands findValueByNumber(int number) { + return SqlSupportedPositionedCommands.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(9); + } + + private static final SqlSupportedPositionedCommands[] VALUES = values(); + + public static SqlSupportedPositionedCommands valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlSupportedPositionedCommands(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlSupportedPositionedCommands) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SqlSupportedSubqueries} + */ + public enum 
SqlSupportedSubqueries + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_SUBQUERIES_IN_COMPARISONS = 0; + */ + SQL_SUBQUERIES_IN_COMPARISONS(0), + /** + * SQL_SUBQUERIES_IN_EXISTS = 1; + */ + SQL_SUBQUERIES_IN_EXISTS(1), + /** + * SQL_SUBQUERIES_IN_INS = 2; + */ + SQL_SUBQUERIES_IN_INS(2), + /** + * SQL_SUBQUERIES_IN_QUANTIFIEDS = 3; + */ + SQL_SUBQUERIES_IN_QUANTIFIEDS(3), + UNRECOGNIZED(-1), + ; + + /** + * SQL_SUBQUERIES_IN_COMPARISONS = 0; + */ + public static final int SQL_SUBQUERIES_IN_COMPARISONS_VALUE = 0; + /** + * SQL_SUBQUERIES_IN_EXISTS = 1; + */ + public static final int SQL_SUBQUERIES_IN_EXISTS_VALUE = 1; + /** + * SQL_SUBQUERIES_IN_INS = 2; + */ + public static final int SQL_SUBQUERIES_IN_INS_VALUE = 2; + /** + * SQL_SUBQUERIES_IN_QUANTIFIEDS = 3; + */ + public static final int SQL_SUBQUERIES_IN_QUANTIFIEDS_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlSupportedSubqueries valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SqlSupportedSubqueries forNumber(int value) { + switch (value) { + case 0: return SQL_SUBQUERIES_IN_COMPARISONS; + case 1: return SQL_SUBQUERIES_IN_EXISTS; + case 2: return SQL_SUBQUERIES_IN_INS; + case 3: return SQL_SUBQUERIES_IN_QUANTIFIEDS; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlSupportedSubqueries> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlSupportedSubqueries findValueByNumber(int number) { + return SqlSupportedSubqueries.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(10); + } + + private static final SqlSupportedSubqueries[] VALUES = values(); + + public static SqlSupportedSubqueries valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlSupportedSubqueries(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlSupportedSubqueries) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SqlSupportedUnions} + */ + 
public enum SqlSupportedUnions + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_UNION = 0; + */ + SQL_UNION(0), + /** + * SQL_UNION_ALL = 1; + */ + SQL_UNION_ALL(1), + UNRECOGNIZED(-1), + ; + + /** + * SQL_UNION = 0; + */ + public static final int SQL_UNION_VALUE = 0; + /** + * SQL_UNION_ALL = 1; + */ + public static final int SQL_UNION_ALL_VALUE = 1; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlSupportedUnions valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SqlSupportedUnions forNumber(int value) { + switch (value) { + case 0: return SQL_UNION; + case 1: return SQL_UNION_ALL; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlSupportedUnions> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlSupportedUnions findValueByNumber(int number) { + return SqlSupportedUnions.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(11); + } + + private static final SqlSupportedUnions[] VALUES = values(); + + public static SqlSupportedUnions valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlSupportedUnions(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlSupportedUnions) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SqlTransactionIsolationLevel} + */ + public enum SqlTransactionIsolationLevel + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_TRANSACTION_NONE = 0; + */ + 
SQL_TRANSACTION_NONE(0), + /** + * SQL_TRANSACTION_READ_UNCOMMITTED = 1; + */ + SQL_TRANSACTION_READ_UNCOMMITTED(1), + /** + * SQL_TRANSACTION_READ_COMMITTED = 2; + */ + SQL_TRANSACTION_READ_COMMITTED(2), + /** + * SQL_TRANSACTION_REPEATABLE_READ = 3; + */ + SQL_TRANSACTION_REPEATABLE_READ(3), + /** + * SQL_TRANSACTION_SERIALIZABLE = 4; + */ + SQL_TRANSACTION_SERIALIZABLE(4), + UNRECOGNIZED(-1), + ; + + /** + * SQL_TRANSACTION_NONE = 0; + */ + public static final int SQL_TRANSACTION_NONE_VALUE = 0; + /** + * SQL_TRANSACTION_READ_UNCOMMITTED = 1; + */ + public static final int SQL_TRANSACTION_READ_UNCOMMITTED_VALUE = 1; + /** + * SQL_TRANSACTION_READ_COMMITTED = 2; + */ + public static final int SQL_TRANSACTION_READ_COMMITTED_VALUE = 2; + /** + * SQL_TRANSACTION_REPEATABLE_READ = 3; + */ + public static final int SQL_TRANSACTION_REPEATABLE_READ_VALUE = 3; + /** + * SQL_TRANSACTION_SERIALIZABLE = 4; + */ + public static final int SQL_TRANSACTION_SERIALIZABLE_VALUE = 4; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlTransactionIsolationLevel valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SqlTransactionIsolationLevel forNumber(int value) { + switch (value) { + case 0: return SQL_TRANSACTION_NONE; + case 1: return SQL_TRANSACTION_READ_UNCOMMITTED; + case 2: return SQL_TRANSACTION_READ_COMMITTED; + case 3: return SQL_TRANSACTION_REPEATABLE_READ; + case 4: return SQL_TRANSACTION_SERIALIZABLE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlTransactionIsolationLevel> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlTransactionIsolationLevel findValueByNumber(int number) { + return SqlTransactionIsolationLevel.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(12); + } + + private static final SqlTransactionIsolationLevel[] VALUES = values(); + + public static SqlTransactionIsolationLevel valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlTransactionIsolationLevel(int value) { + this.value = value; + } + + // 
@@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlTransactionIsolationLevel) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SqlSupportedTransactions} + */ + public enum SqlSupportedTransactions + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_TRANSACTION_UNSPECIFIED = 0; + */ + SQL_TRANSACTION_UNSPECIFIED(0), + /** + * SQL_DATA_DEFINITION_TRANSACTIONS = 1; + */ + SQL_DATA_DEFINITION_TRANSACTIONS(1), + /** + * SQL_DATA_MANIPULATION_TRANSACTIONS = 2; + */ + SQL_DATA_MANIPULATION_TRANSACTIONS(2), + UNRECOGNIZED(-1), + ; + + /** + * SQL_TRANSACTION_UNSPECIFIED = 0; + */ + public static final int SQL_TRANSACTION_UNSPECIFIED_VALUE = 0; + /** + * SQL_DATA_DEFINITION_TRANSACTIONS = 1; + */ + public static final int SQL_DATA_DEFINITION_TRANSACTIONS_VALUE = 1; + /** + * SQL_DATA_MANIPULATION_TRANSACTIONS = 2; + */ + public static final int SQL_DATA_MANIPULATION_TRANSACTIONS_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlSupportedTransactions valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SqlSupportedTransactions forNumber(int value) { + switch (value) { + case 0: return SQL_TRANSACTION_UNSPECIFIED; + case 1: return SQL_DATA_DEFINITION_TRANSACTIONS; + case 2: return SQL_DATA_MANIPULATION_TRANSACTIONS; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlSupportedTransactions> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlSupportedTransactions findValueByNumber(int number) { + return SqlSupportedTransactions.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(13); + } + + private static final SqlSupportedTransactions[] VALUES = values(); + + public static SqlSupportedTransactions valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlSupportedTransactions(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlSupportedTransactions) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.SqlSupportedResultSetType} + */ + 
public enum SqlSupportedResultSetType + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_RESULT_SET_TYPE_UNSPECIFIED = 0; + */ + SQL_RESULT_SET_TYPE_UNSPECIFIED(0), + /** + * SQL_RESULT_SET_TYPE_FORWARD_ONLY = 1; + */ + SQL_RESULT_SET_TYPE_FORWARD_ONLY(1), + /** + * SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE = 2; + */ + SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE(2), + /** + * SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE = 3; + */ + SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE(3), + UNRECOGNIZED(-1), + ; + + /** + * SQL_RESULT_SET_TYPE_UNSPECIFIED = 0; + */ + public static final int SQL_RESULT_SET_TYPE_UNSPECIFIED_VALUE = 0; + /** + * SQL_RESULT_SET_TYPE_FORWARD_ONLY = 1; + */ + public static final int SQL_RESULT_SET_TYPE_FORWARD_ONLY_VALUE = 1; + /** + * SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE = 2; + */ + public static final int SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE_VALUE = 2; + /** + * SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE = 3; + */ + public static final int SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlSupportedResultSetType valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SqlSupportedResultSetType forNumber(int value) { + switch (value) { + case 0: return SQL_RESULT_SET_TYPE_UNSPECIFIED; + case 1: return SQL_RESULT_SET_TYPE_FORWARD_ONLY; + case 2: return SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE; + case 3: return SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlSupportedResultSetType> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlSupportedResultSetType findValueByNumber(int number) { + return SqlSupportedResultSetType.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(14); + } + + private static final SqlSupportedResultSetType[] VALUES = values(); + + public static SqlSupportedResultSetType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlSupportedResultSetType(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlSupportedResultSetType) + } + + /** + * Protobuf enum 
{@code arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency} + */ + public enum SqlSupportedResultSetConcurrency + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED = 0; + */ + SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED(0), + /** + * SQL_RESULT_SET_CONCURRENCY_READ_ONLY = 1; + */ + SQL_RESULT_SET_CONCURRENCY_READ_ONLY(1), + /** + * SQL_RESULT_SET_CONCURRENCY_UPDATABLE = 2; + */ + SQL_RESULT_SET_CONCURRENCY_UPDATABLE(2), + UNRECOGNIZED(-1), + ; + + /** + * SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED = 0; + */ + public static final int SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED_VALUE = 0; + /** + * SQL_RESULT_SET_CONCURRENCY_READ_ONLY = 1; + */ + public static final int SQL_RESULT_SET_CONCURRENCY_READ_ONLY_VALUE = 1; + /** + * SQL_RESULT_SET_CONCURRENCY_UPDATABLE = 2; + */ + public static final int SQL_RESULT_SET_CONCURRENCY_UPDATABLE_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SqlSupportedResultSetConcurrency valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static SqlSupportedResultSetConcurrency forNumber(int value) { + switch (value) { + case 0: return SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED; + case 1: return SQL_RESULT_SET_CONCURRENCY_READ_ONLY; + case 2: return SQL_RESULT_SET_CONCURRENCY_UPDATABLE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlSupportedResultSetConcurrency> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlSupportedResultSetConcurrency findValueByNumber(int number) { + return SqlSupportedResultSetConcurrency.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(15); + } + + private static final SqlSupportedResultSetConcurrency[] VALUES = values(); + + public static SqlSupportedResultSetConcurrency valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlSupportedResultSetConcurrency(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency) + } + + /** + * 
Protobuf enum {@code arrow.flight.protocol.sql.SqlSupportsConvert} + */ + public enum SqlSupportsConvert + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SQL_CONVERT_BIGINT = 0; + */ + SQL_CONVERT_BIGINT(0), + /** + * SQL_CONVERT_BINARY = 1; + */ + SQL_CONVERT_BINARY(1), + /** + * SQL_CONVERT_BIT = 2; + */ + SQL_CONVERT_BIT(2), + /** + * SQL_CONVERT_CHAR = 3; + */ + SQL_CONVERT_CHAR(3), + /** + * SQL_CONVERT_DATE = 4; + */ + SQL_CONVERT_DATE(4), + /** + * SQL_CONVERT_DECIMAL = 5; + */ + SQL_CONVERT_DECIMAL(5), + /** + * SQL_CONVERT_FLOAT = 6; + */ + SQL_CONVERT_FLOAT(6), + /** + * SQL_CONVERT_INTEGER = 7; + */ + SQL_CONVERT_INTEGER(7), + /** + * SQL_CONVERT_INTERVAL_DAY_TIME = 8; + */ + SQL_CONVERT_INTERVAL_DAY_TIME(8), + /** + * SQL_CONVERT_INTERVAL_YEAR_MONTH = 9; + */ + SQL_CONVERT_INTERVAL_YEAR_MONTH(9), + /** + * SQL_CONVERT_LONGVARBINARY = 10; + */ + SQL_CONVERT_LONGVARBINARY(10), + /** + * SQL_CONVERT_LONGVARCHAR = 11; + */ + SQL_CONVERT_LONGVARCHAR(11), + /** + * SQL_CONVERT_NUMERIC = 12; + */ + SQL_CONVERT_NUMERIC(12), + /** + * SQL_CONVERT_REAL = 13; + */ + SQL_CONVERT_REAL(13), + /** + * SQL_CONVERT_SMALLINT = 14; + */ + SQL_CONVERT_SMALLINT(14), + /** + * SQL_CONVERT_TIME = 15; + */ + SQL_CONVERT_TIME(15), + /** + * SQL_CONVERT_TIMESTAMP = 16; + */ + SQL_CONVERT_TIMESTAMP(16), + /** + * SQL_CONVERT_TINYINT = 17; + */ + SQL_CONVERT_TINYINT(17), + /** + * SQL_CONVERT_VARBINARY = 18; + */ + SQL_CONVERT_VARBINARY(18), + /** + * SQL_CONVERT_VARCHAR = 19; + */ + SQL_CONVERT_VARCHAR(19), + UNRECOGNIZED(-1), + ; + + /** + * SQL_CONVERT_BIGINT = 0; + */ + public static final int SQL_CONVERT_BIGINT_VALUE = 0; + /** + * SQL_CONVERT_BINARY = 1; + */ + public static final int SQL_CONVERT_BINARY_VALUE = 1; + /** + * SQL_CONVERT_BIT = 2; + */ + public static final int SQL_CONVERT_BIT_VALUE = 2; + /** + * SQL_CONVERT_CHAR = 3; + */ + public static final int SQL_CONVERT_CHAR_VALUE = 3; + /** + * SQL_CONVERT_DATE = 4; + */ + public static final int 
SQL_CONVERT_DATE_VALUE = 4; + /** + * SQL_CONVERT_DECIMAL = 5; + */ + public static final int SQL_CONVERT_DECIMAL_VALUE = 5; + /** + * SQL_CONVERT_FLOAT = 6; + */ + public static final int SQL_CONVERT_FLOAT_VALUE = 6; + /** + * SQL_CONVERT_INTEGER = 7; + */ + public static final int SQL_CONVERT_INTEGER_VALUE = 7; + /** + * SQL_CONVERT_INTERVAL_DAY_TIME = 8; + */ + public static final int SQL_CONVERT_INTERVAL_DAY_TIME_VALUE = 8; + /** + * SQL_CONVERT_INTERVAL_YEAR_MONTH = 9; + */ + public static final int SQL_CONVERT_INTERVAL_YEAR_MONTH_VALUE = 9; + /** + * SQL_CONVERT_LONGVARBINARY = 10; + */ + public static final int SQL_CONVERT_LONGVARBINARY_VALUE = 10; + /** + * SQL_CONVERT_LONGVARCHAR = 11; + */ + public static final int SQL_CONVERT_LONGVARCHAR_VALUE = 11; + /** + * SQL_CONVERT_NUMERIC = 12; + */ + public static final int SQL_CONVERT_NUMERIC_VALUE = 12; + /** + * SQL_CONVERT_REAL = 13; + */ + public static final int SQL_CONVERT_REAL_VALUE = 13; + /** + * SQL_CONVERT_SMALLINT = 14; + */ + public static final int SQL_CONVERT_SMALLINT_VALUE = 14; + /** + * SQL_CONVERT_TIME = 15; + */ + public static final int SQL_CONVERT_TIME_VALUE = 15; + /** + * SQL_CONVERT_TIMESTAMP = 16; + */ + public static final int SQL_CONVERT_TIMESTAMP_VALUE = 16; + /** + * SQL_CONVERT_TINYINT = 17; + */ + public static final int SQL_CONVERT_TINYINT_VALUE = 17; + /** + * SQL_CONVERT_VARBINARY = 18; + */ + public static final int SQL_CONVERT_VARBINARY_VALUE = 18; + /** + * SQL_CONVERT_VARCHAR = 19; + */ + public static final int SQL_CONVERT_VARCHAR_VALUE = 19; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated + public static SqlSupportsConvert valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static SqlSupportsConvert forNumber(int value) { + switch (value) { + case 0: return SQL_CONVERT_BIGINT; + case 1: return SQL_CONVERT_BINARY; + case 2: return SQL_CONVERT_BIT; + case 3: return SQL_CONVERT_CHAR; + case 4: return SQL_CONVERT_DATE; + case 5: return SQL_CONVERT_DECIMAL; + case 6: return SQL_CONVERT_FLOAT; + case 7: return SQL_CONVERT_INTEGER; + case 8: return SQL_CONVERT_INTERVAL_DAY_TIME; + case 9: return SQL_CONVERT_INTERVAL_YEAR_MONTH; + case 10: return SQL_CONVERT_LONGVARBINARY; + case 11: return SQL_CONVERT_LONGVARCHAR; + case 12: return SQL_CONVERT_NUMERIC; + case 13: return SQL_CONVERT_REAL; + case 14: return SQL_CONVERT_SMALLINT; + case 15: return SQL_CONVERT_TIME; + case 16: return SQL_CONVERT_TIMESTAMP; + case 17: return SQL_CONVERT_TINYINT; + case 18: return SQL_CONVERT_VARBINARY; + case 19: return SQL_CONVERT_VARCHAR; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SqlSupportsConvert> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SqlSupportsConvert findValueByNumber(int number) { + return SqlSupportsConvert.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final 
com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(16); + } + + private static final SqlSupportsConvert[] VALUES = values(); + + public static SqlSupportsConvert valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SqlSupportsConvert(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.SqlSupportsConvert) + } + + /** + *
+   **
+   * The JDBC/ODBC-defined type of any object.
+   * All the values here are the same as in the JDBC and ODBC specs.
+   * 
+ * + * Protobuf enum {@code arrow.flight.protocol.sql.XdbcDataType} + */ + public enum XdbcDataType + implements com.google.protobuf.ProtocolMessageEnum { + /** + * XDBC_UNKNOWN_TYPE = 0; + */ + XDBC_UNKNOWN_TYPE(0), + /** + * XDBC_CHAR = 1; + */ + XDBC_CHAR(1), + /** + * XDBC_NUMERIC = 2; + */ + XDBC_NUMERIC(2), + /** + * XDBC_DECIMAL = 3; + */ + XDBC_DECIMAL(3), + /** + * XDBC_INTEGER = 4; + */ + XDBC_INTEGER(4), + /** + * XDBC_SMALLINT = 5; + */ + XDBC_SMALLINT(5), + /** + * XDBC_FLOAT = 6; + */ + XDBC_FLOAT(6), + /** + * XDBC_REAL = 7; + */ + XDBC_REAL(7), + /** + * XDBC_DOUBLE = 8; + */ + XDBC_DOUBLE(8), + /** + * XDBC_DATETIME = 9; + */ + XDBC_DATETIME(9), + /** + * XDBC_INTERVAL = 10; + */ + XDBC_INTERVAL(10), + /** + * XDBC_VARCHAR = 12; + */ + XDBC_VARCHAR(12), + /** + * XDBC_DATE = 91; + */ + XDBC_DATE(91), + /** + * XDBC_TIME = 92; + */ + XDBC_TIME(92), + /** + * XDBC_TIMESTAMP = 93; + */ + XDBC_TIMESTAMP(93), + /** + * XDBC_LONGVARCHAR = -1; + */ + XDBC_LONGVARCHAR(-1), + /** + * XDBC_BINARY = -2; + */ + XDBC_BINARY(-2), + /** + * XDBC_VARBINARY = -3; + */ + XDBC_VARBINARY(-3), + /** + * XDBC_LONGVARBINARY = -4; + */ + XDBC_LONGVARBINARY(-4), + /** + * XDBC_BIGINT = -5; + */ + XDBC_BIGINT(-5), + /** + * XDBC_TINYINT = -6; + */ + XDBC_TINYINT(-6), + /** + * XDBC_BIT = -7; + */ + XDBC_BIT(-7), + /** + * XDBC_WCHAR = -8; + */ + XDBC_WCHAR(-8), + /** + * XDBC_WVARCHAR = -9; + */ + XDBC_WVARCHAR(-9), + UNRECOGNIZED(-1), + ; + + /** + * XDBC_UNKNOWN_TYPE = 0; + */ + public static final int XDBC_UNKNOWN_TYPE_VALUE = 0; + /** + * XDBC_CHAR = 1; + */ + public static final int XDBC_CHAR_VALUE = 1; + /** + * XDBC_NUMERIC = 2; + */ + public static final int XDBC_NUMERIC_VALUE = 2; + /** + * XDBC_DECIMAL = 3; + */ + public static final int XDBC_DECIMAL_VALUE = 3; + /** + * XDBC_INTEGER = 4; + */ + public static final int XDBC_INTEGER_VALUE = 4; + /** + * XDBC_SMALLINT = 5; + */ + public static final int XDBC_SMALLINT_VALUE = 5; + /** + * XDBC_FLOAT = 6; + */ + 
public static final int XDBC_FLOAT_VALUE = 6; + /** + * XDBC_REAL = 7; + */ + public static final int XDBC_REAL_VALUE = 7; + /** + * XDBC_DOUBLE = 8; + */ + public static final int XDBC_DOUBLE_VALUE = 8; + /** + * XDBC_DATETIME = 9; + */ + public static final int XDBC_DATETIME_VALUE = 9; + /** + * XDBC_INTERVAL = 10; + */ + public static final int XDBC_INTERVAL_VALUE = 10; + /** + * XDBC_VARCHAR = 12; + */ + public static final int XDBC_VARCHAR_VALUE = 12; + /** + * XDBC_DATE = 91; + */ + public static final int XDBC_DATE_VALUE = 91; + /** + * XDBC_TIME = 92; + */ + public static final int XDBC_TIME_VALUE = 92; + /** + * XDBC_TIMESTAMP = 93; + */ + public static final int XDBC_TIMESTAMP_VALUE = 93; + /** + * XDBC_LONGVARCHAR = -1; + */ + public static final int XDBC_LONGVARCHAR_VALUE = -1; + /** + * XDBC_BINARY = -2; + */ + public static final int XDBC_BINARY_VALUE = -2; + /** + * XDBC_VARBINARY = -3; + */ + public static final int XDBC_VARBINARY_VALUE = -3; + /** + * XDBC_LONGVARBINARY = -4; + */ + public static final int XDBC_LONGVARBINARY_VALUE = -4; + /** + * XDBC_BIGINT = -5; + */ + public static final int XDBC_BIGINT_VALUE = -5; + /** + * XDBC_TINYINT = -6; + */ + public static final int XDBC_TINYINT_VALUE = -6; + /** + * XDBC_BIT = -7; + */ + public static final int XDBC_BIT_VALUE = -7; + /** + * XDBC_WCHAR = -8; + */ + public static final int XDBC_WCHAR_VALUE = -8; + /** + * XDBC_WVARCHAR = -9; + */ + public static final int XDBC_WVARCHAR_VALUE = -9; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated + public static XdbcDataType valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static XdbcDataType forNumber(int value) { + switch (value) { + case 0: return XDBC_UNKNOWN_TYPE; + case 1: return XDBC_CHAR; + case 2: return XDBC_NUMERIC; + case 3: return XDBC_DECIMAL; + case 4: return XDBC_INTEGER; + case 5: return XDBC_SMALLINT; + case 6: return XDBC_FLOAT; + case 7: return XDBC_REAL; + case 8: return XDBC_DOUBLE; + case 9: return XDBC_DATETIME; + case 10: return XDBC_INTERVAL; + case 12: return XDBC_VARCHAR; + case 91: return XDBC_DATE; + case 92: return XDBC_TIME; + case 93: return XDBC_TIMESTAMP; + case -1: return XDBC_LONGVARCHAR; + case -2: return XDBC_BINARY; + case -3: return XDBC_VARBINARY; + case -4: return XDBC_LONGVARBINARY; + case -5: return XDBC_BIGINT; + case -6: return XDBC_TINYINT; + case -7: return XDBC_BIT; + case -8: return XDBC_WCHAR; + case -9: return XDBC_WVARCHAR; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + XdbcDataType> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public XdbcDataType findValueByNumber(int number) { + return XdbcDataType.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + 
return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(17); + } + + private static final XdbcDataType[] VALUES = values(); + + public static XdbcDataType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private XdbcDataType(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.XdbcDataType) + } + + /** + *
+   **
+   * Detailed subtype information for XDBC_TYPE_DATETIME and XDBC_TYPE_INTERVAL.
+   * 
+ * + * Protobuf enum {@code arrow.flight.protocol.sql.XdbcDatetimeSubcode} + */ + public enum XdbcDatetimeSubcode + implements com.google.protobuf.ProtocolMessageEnum { + /** + * XDBC_SUBCODE_UNKNOWN = 0; + */ + XDBC_SUBCODE_UNKNOWN(0, 0), + /** + * XDBC_SUBCODE_YEAR = 1; + */ + XDBC_SUBCODE_YEAR(1, 1), + /** + * XDBC_SUBCODE_TIME = 2; + */ + XDBC_SUBCODE_TIME(3, 2), + /** + * XDBC_SUBCODE_TIMESTAMP = 3; + */ + XDBC_SUBCODE_TIMESTAMP(5, 3), + /** + * XDBC_SUBCODE_TIME_WITH_TIMEZONE = 4; + */ + XDBC_SUBCODE_TIME_WITH_TIMEZONE(7, 4), + /** + * XDBC_SUBCODE_TIMESTAMP_WITH_TIMEZONE = 5; + */ + XDBC_SUBCODE_TIMESTAMP_WITH_TIMEZONE(9, 5), + /** + * XDBC_SUBCODE_SECOND = 6; + */ + XDBC_SUBCODE_SECOND(11, 6), + /** + * XDBC_SUBCODE_YEAR_TO_MONTH = 7; + */ + XDBC_SUBCODE_YEAR_TO_MONTH(12, 7), + /** + * XDBC_SUBCODE_DAY_TO_HOUR = 8; + */ + XDBC_SUBCODE_DAY_TO_HOUR(13, 8), + /** + * XDBC_SUBCODE_DAY_TO_MINUTE = 9; + */ + XDBC_SUBCODE_DAY_TO_MINUTE(14, 9), + /** + * XDBC_SUBCODE_DAY_TO_SECOND = 10; + */ + XDBC_SUBCODE_DAY_TO_SECOND(15, 10), + /** + * XDBC_SUBCODE_HOUR_TO_MINUTE = 11; + */ + XDBC_SUBCODE_HOUR_TO_MINUTE(16, 11), + /** + * XDBC_SUBCODE_HOUR_TO_SECOND = 12; + */ + XDBC_SUBCODE_HOUR_TO_SECOND(17, 12), + /** + * XDBC_SUBCODE_MINUTE_TO_SECOND = 13; + */ + XDBC_SUBCODE_MINUTE_TO_SECOND(18, 13), + /** + * XDBC_SUBCODE_INTERVAL_YEAR = 101; + */ + XDBC_SUBCODE_INTERVAL_YEAR(19, 101), + /** + * XDBC_SUBCODE_INTERVAL_MONTH = 102; + */ + XDBC_SUBCODE_INTERVAL_MONTH(20, 102), + /** + * XDBC_SUBCODE_INTERVAL_DAY = 103; + */ + XDBC_SUBCODE_INTERVAL_DAY(21, 103), + /** + * XDBC_SUBCODE_INTERVAL_HOUR = 104; + */ + XDBC_SUBCODE_INTERVAL_HOUR(22, 104), + /** + * XDBC_SUBCODE_INTERVAL_MINUTE = 105; + */ + XDBC_SUBCODE_INTERVAL_MINUTE(23, 105), + /** + * XDBC_SUBCODE_INTERVAL_SECOND = 106; + */ + XDBC_SUBCODE_INTERVAL_SECOND(24, 106), + /** + * XDBC_SUBCODE_INTERVAL_YEAR_TO_MONTH = 107; + */ + XDBC_SUBCODE_INTERVAL_YEAR_TO_MONTH(25, 107), + /** + * XDBC_SUBCODE_INTERVAL_DAY_TO_HOUR 
= 108; + */ + XDBC_SUBCODE_INTERVAL_DAY_TO_HOUR(26, 108), + /** + * XDBC_SUBCODE_INTERVAL_DAY_TO_MINUTE = 109; + */ + XDBC_SUBCODE_INTERVAL_DAY_TO_MINUTE(27, 109), + /** + * XDBC_SUBCODE_INTERVAL_DAY_TO_SECOND = 110; + */ + XDBC_SUBCODE_INTERVAL_DAY_TO_SECOND(28, 110), + /** + * XDBC_SUBCODE_INTERVAL_HOUR_TO_MINUTE = 111; + */ + XDBC_SUBCODE_INTERVAL_HOUR_TO_MINUTE(29, 111), + /** + * XDBC_SUBCODE_INTERVAL_HOUR_TO_SECOND = 112; + */ + XDBC_SUBCODE_INTERVAL_HOUR_TO_SECOND(30, 112), + /** + * XDBC_SUBCODE_INTERVAL_MINUTE_TO_SECOND = 113; + */ + XDBC_SUBCODE_INTERVAL_MINUTE_TO_SECOND(31, 113), + UNRECOGNIZED(-1, -1), + ; + + /** + * XDBC_SUBCODE_DATE = 1; + */ + public static final XdbcDatetimeSubcode XDBC_SUBCODE_DATE = XDBC_SUBCODE_YEAR; + /** + * XDBC_SUBCODE_MONTH = 2; + */ + public static final XdbcDatetimeSubcode XDBC_SUBCODE_MONTH = XDBC_SUBCODE_TIME; + /** + * XDBC_SUBCODE_DAY = 3; + */ + public static final XdbcDatetimeSubcode XDBC_SUBCODE_DAY = XDBC_SUBCODE_TIMESTAMP; + /** + * XDBC_SUBCODE_HOUR = 4; + */ + public static final XdbcDatetimeSubcode XDBC_SUBCODE_HOUR = XDBC_SUBCODE_TIME_WITH_TIMEZONE; + /** + * XDBC_SUBCODE_MINUTE = 5; + */ + public static final XdbcDatetimeSubcode XDBC_SUBCODE_MINUTE = XDBC_SUBCODE_TIMESTAMP_WITH_TIMEZONE; + /** + * XDBC_SUBCODE_UNKNOWN = 0; + */ + public static final int XDBC_SUBCODE_UNKNOWN_VALUE = 0; + /** + * XDBC_SUBCODE_YEAR = 1; + */ + public static final int XDBC_SUBCODE_YEAR_VALUE = 1; + /** + * XDBC_SUBCODE_DATE = 1; + */ + public static final int XDBC_SUBCODE_DATE_VALUE = 1; + /** + * XDBC_SUBCODE_TIME = 2; + */ + public static final int XDBC_SUBCODE_TIME_VALUE = 2; + /** + * XDBC_SUBCODE_MONTH = 2; + */ + public static final int XDBC_SUBCODE_MONTH_VALUE = 2; + /** + * XDBC_SUBCODE_TIMESTAMP = 3; + */ + public static final int XDBC_SUBCODE_TIMESTAMP_VALUE = 3; + /** + * XDBC_SUBCODE_DAY = 3; + */ + public static final int XDBC_SUBCODE_DAY_VALUE = 3; + /** + * XDBC_SUBCODE_TIME_WITH_TIMEZONE = 4; + */ + public static 
final int XDBC_SUBCODE_TIME_WITH_TIMEZONE_VALUE = 4; + /** + * XDBC_SUBCODE_HOUR = 4; + */ + public static final int XDBC_SUBCODE_HOUR_VALUE = 4; + /** + * XDBC_SUBCODE_TIMESTAMP_WITH_TIMEZONE = 5; + */ + public static final int XDBC_SUBCODE_TIMESTAMP_WITH_TIMEZONE_VALUE = 5; + /** + * XDBC_SUBCODE_MINUTE = 5; + */ + public static final int XDBC_SUBCODE_MINUTE_VALUE = 5; + /** + * XDBC_SUBCODE_SECOND = 6; + */ + public static final int XDBC_SUBCODE_SECOND_VALUE = 6; + /** + * XDBC_SUBCODE_YEAR_TO_MONTH = 7; + */ + public static final int XDBC_SUBCODE_YEAR_TO_MONTH_VALUE = 7; + /** + * XDBC_SUBCODE_DAY_TO_HOUR = 8; + */ + public static final int XDBC_SUBCODE_DAY_TO_HOUR_VALUE = 8; + /** + * XDBC_SUBCODE_DAY_TO_MINUTE = 9; + */ + public static final int XDBC_SUBCODE_DAY_TO_MINUTE_VALUE = 9; + /** + * XDBC_SUBCODE_DAY_TO_SECOND = 10; + */ + public static final int XDBC_SUBCODE_DAY_TO_SECOND_VALUE = 10; + /** + * XDBC_SUBCODE_HOUR_TO_MINUTE = 11; + */ + public static final int XDBC_SUBCODE_HOUR_TO_MINUTE_VALUE = 11; + /** + * XDBC_SUBCODE_HOUR_TO_SECOND = 12; + */ + public static final int XDBC_SUBCODE_HOUR_TO_SECOND_VALUE = 12; + /** + * XDBC_SUBCODE_MINUTE_TO_SECOND = 13; + */ + public static final int XDBC_SUBCODE_MINUTE_TO_SECOND_VALUE = 13; + /** + * XDBC_SUBCODE_INTERVAL_YEAR = 101; + */ + public static final int XDBC_SUBCODE_INTERVAL_YEAR_VALUE = 101; + /** + * XDBC_SUBCODE_INTERVAL_MONTH = 102; + */ + public static final int XDBC_SUBCODE_INTERVAL_MONTH_VALUE = 102; + /** + * XDBC_SUBCODE_INTERVAL_DAY = 103; + */ + public static final int XDBC_SUBCODE_INTERVAL_DAY_VALUE = 103; + /** + * XDBC_SUBCODE_INTERVAL_HOUR = 104; + */ + public static final int XDBC_SUBCODE_INTERVAL_HOUR_VALUE = 104; + /** + * XDBC_SUBCODE_INTERVAL_MINUTE = 105; + */ + public static final int XDBC_SUBCODE_INTERVAL_MINUTE_VALUE = 105; + /** + * XDBC_SUBCODE_INTERVAL_SECOND = 106; + */ + public static final int XDBC_SUBCODE_INTERVAL_SECOND_VALUE = 106; + /** + * 
XDBC_SUBCODE_INTERVAL_YEAR_TO_MONTH = 107; + */ + public static final int XDBC_SUBCODE_INTERVAL_YEAR_TO_MONTH_VALUE = 107; + /** + * XDBC_SUBCODE_INTERVAL_DAY_TO_HOUR = 108; + */ + public static final int XDBC_SUBCODE_INTERVAL_DAY_TO_HOUR_VALUE = 108; + /** + * XDBC_SUBCODE_INTERVAL_DAY_TO_MINUTE = 109; + */ + public static final int XDBC_SUBCODE_INTERVAL_DAY_TO_MINUTE_VALUE = 109; + /** + * XDBC_SUBCODE_INTERVAL_DAY_TO_SECOND = 110; + */ + public static final int XDBC_SUBCODE_INTERVAL_DAY_TO_SECOND_VALUE = 110; + /** + * XDBC_SUBCODE_INTERVAL_HOUR_TO_MINUTE = 111; + */ + public static final int XDBC_SUBCODE_INTERVAL_HOUR_TO_MINUTE_VALUE = 111; + /** + * XDBC_SUBCODE_INTERVAL_HOUR_TO_SECOND = 112; + */ + public static final int XDBC_SUBCODE_INTERVAL_HOUR_TO_SECOND_VALUE = 112; + /** + * XDBC_SUBCODE_INTERVAL_MINUTE_TO_SECOND = 113; + */ + public static final int XDBC_SUBCODE_INTERVAL_MINUTE_TO_SECOND_VALUE = 113; + + + public final int getNumber() { + if (index == -1) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static XdbcDatetimeSubcode valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static XdbcDatetimeSubcode forNumber(int value) { + switch (value) { + case 0: return XDBC_SUBCODE_UNKNOWN; + case 1: return XDBC_SUBCODE_YEAR; + case 2: return XDBC_SUBCODE_TIME; + case 3: return XDBC_SUBCODE_TIMESTAMP; + case 4: return XDBC_SUBCODE_TIME_WITH_TIMEZONE; + case 5: return XDBC_SUBCODE_TIMESTAMP_WITH_TIMEZONE; + case 6: return XDBC_SUBCODE_SECOND; + case 7: return XDBC_SUBCODE_YEAR_TO_MONTH; + case 8: return XDBC_SUBCODE_DAY_TO_HOUR; + case 9: return XDBC_SUBCODE_DAY_TO_MINUTE; + case 10: return XDBC_SUBCODE_DAY_TO_SECOND; + case 11: return XDBC_SUBCODE_HOUR_TO_MINUTE; + case 12: return XDBC_SUBCODE_HOUR_TO_SECOND; + case 13: return XDBC_SUBCODE_MINUTE_TO_SECOND; + case 101: return XDBC_SUBCODE_INTERVAL_YEAR; + case 102: return XDBC_SUBCODE_INTERVAL_MONTH; + case 103: return XDBC_SUBCODE_INTERVAL_DAY; + case 104: return XDBC_SUBCODE_INTERVAL_HOUR; + case 105: return XDBC_SUBCODE_INTERVAL_MINUTE; + case 106: return XDBC_SUBCODE_INTERVAL_SECOND; + case 107: return XDBC_SUBCODE_INTERVAL_YEAR_TO_MONTH; + case 108: return XDBC_SUBCODE_INTERVAL_DAY_TO_HOUR; + case 109: return XDBC_SUBCODE_INTERVAL_DAY_TO_MINUTE; + case 110: return XDBC_SUBCODE_INTERVAL_DAY_TO_SECOND; + case 111: return XDBC_SUBCODE_INTERVAL_HOUR_TO_MINUTE; + case 112: return XDBC_SUBCODE_INTERVAL_HOUR_TO_SECOND; + case 113: return XDBC_SUBCODE_INTERVAL_MINUTE_TO_SECOND; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + XdbcDatetimeSubcode> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public XdbcDatetimeSubcode findValueByNumber(int number) { + return XdbcDatetimeSubcode.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (index == -1) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an 
unrecognized enum value."); + } + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(18); + } + + private static final XdbcDatetimeSubcode[] VALUES = getStaticValuesArray(); + private static XdbcDatetimeSubcode[] getStaticValuesArray() { + return new XdbcDatetimeSubcode[] { + XDBC_SUBCODE_UNKNOWN, XDBC_SUBCODE_YEAR, XDBC_SUBCODE_DATE, XDBC_SUBCODE_TIME, XDBC_SUBCODE_MONTH, XDBC_SUBCODE_TIMESTAMP, XDBC_SUBCODE_DAY, XDBC_SUBCODE_TIME_WITH_TIMEZONE, XDBC_SUBCODE_HOUR, XDBC_SUBCODE_TIMESTAMP_WITH_TIMEZONE, XDBC_SUBCODE_MINUTE, XDBC_SUBCODE_SECOND, XDBC_SUBCODE_YEAR_TO_MONTH, XDBC_SUBCODE_DAY_TO_HOUR, XDBC_SUBCODE_DAY_TO_MINUTE, XDBC_SUBCODE_DAY_TO_SECOND, XDBC_SUBCODE_HOUR_TO_MINUTE, XDBC_SUBCODE_HOUR_TO_SECOND, XDBC_SUBCODE_MINUTE_TO_SECOND, XDBC_SUBCODE_INTERVAL_YEAR, XDBC_SUBCODE_INTERVAL_MONTH, XDBC_SUBCODE_INTERVAL_DAY, XDBC_SUBCODE_INTERVAL_HOUR, XDBC_SUBCODE_INTERVAL_MINUTE, XDBC_SUBCODE_INTERVAL_SECOND, XDBC_SUBCODE_INTERVAL_YEAR_TO_MONTH, XDBC_SUBCODE_INTERVAL_DAY_TO_HOUR, XDBC_SUBCODE_INTERVAL_DAY_TO_MINUTE, XDBC_SUBCODE_INTERVAL_DAY_TO_SECOND, XDBC_SUBCODE_INTERVAL_HOUR_TO_MINUTE, XDBC_SUBCODE_INTERVAL_HOUR_TO_SECOND, XDBC_SUBCODE_INTERVAL_MINUTE_TO_SECOND, + }; + } + public static XdbcDatetimeSubcode valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private XdbcDatetimeSubcode(int index, int value) { + this.index = index; + this.value = value; + } + + // 
@@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.XdbcDatetimeSubcode) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.Nullable} + */ + public enum Nullable + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+     **
+     * Indicates that the fields does not allow the use of null values.
+     * 
+ * + * NULLABILITY_NO_NULLS = 0; + */ + NULLABILITY_NO_NULLS(0), + /** + *
+     **
+     * Indicates that the fields allow the use of null values.
+     * 
+ * + * NULLABILITY_NULLABLE = 1; + */ + NULLABILITY_NULLABLE(1), + /** + *
+     **
+     * Indicates that nullability of the fields cannot be determined.
+     * 
+ * + * NULLABILITY_UNKNOWN = 2; + */ + NULLABILITY_UNKNOWN(2), + UNRECOGNIZED(-1), + ; + + /** + *
+     **
+     * Indicates that the fields does not allow the use of null values.
+     * 
+ * + * NULLABILITY_NO_NULLS = 0; + */ + public static final int NULLABILITY_NO_NULLS_VALUE = 0; + /** + *
+     **
+     * Indicates that the fields allow the use of null values.
+     * 
+ * + * NULLABILITY_NULLABLE = 1; + */ + public static final int NULLABILITY_NULLABLE_VALUE = 1; + /** + *
+     **
+     * Indicates that nullability of the fields cannot be determined.
+     * 
+ * + * NULLABILITY_UNKNOWN = 2; + */ + public static final int NULLABILITY_UNKNOWN_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static Nullable valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static Nullable forNumber(int value) { + switch (value) { + case 0: return NULLABILITY_NO_NULLS; + case 1: return NULLABILITY_NULLABLE; + case 2: return NULLABILITY_UNKNOWN; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + Nullable> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Nullable findValueByNumber(int number) { + return Nullable.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(19); + } + + private static final Nullable[] VALUES = values(); + + public static Nullable 
valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private Nullable(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.Nullable) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.Searchable} + */ + public enum Searchable + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+     **
+     * Indicates that column cannot be used in a WHERE clause.
+     * 
+ * + * SEARCHABLE_NONE = 0; + */ + SEARCHABLE_NONE(0), + /** + *
+     **
+     * Indicates that the column can be used in a WHERE clause if it is using a
+     * LIKE operator.
+     * 
+ * + * SEARCHABLE_CHAR = 1; + */ + SEARCHABLE_CHAR(1), + /** + *
+     **
+     * Indicates that the column can be used In a WHERE clause with any
+     * operator other than LIKE.
+     *
+     * - Allowed operators: comparison, quantified comparison, BETWEEN,
+     *                      DISTINCT, IN, MATCH, and UNIQUE.
+     * 
+ * + * SEARCHABLE_BASIC = 2; + */ + SEARCHABLE_BASIC(2), + /** + *
+     **
+     * Indicates that the column can be used in a WHERE clause using any operator.
+     * 
+ * + * SEARCHABLE_FULL = 3; + */ + SEARCHABLE_FULL(3), + UNRECOGNIZED(-1), + ; + + /** + *
+     **
+     * Indicates that column cannot be used in a WHERE clause.
+     * 
+ * + * SEARCHABLE_NONE = 0; + */ + public static final int SEARCHABLE_NONE_VALUE = 0; + /** + *
+     **
+     * Indicates that the column can be used in a WHERE clause if it is using a
+     * LIKE operator.
+     * 
+ * + * SEARCHABLE_CHAR = 1; + */ + public static final int SEARCHABLE_CHAR_VALUE = 1; + /** + *
+     **
+     * Indicates that the column can be used In a WHERE clause with any
+     * operator other than LIKE.
+     *
+     * - Allowed operators: comparison, quantified comparison, BETWEEN,
+     *                      DISTINCT, IN, MATCH, and UNIQUE.
+     * 
+ * + * SEARCHABLE_BASIC = 2; + */ + public static final int SEARCHABLE_BASIC_VALUE = 2; + /** + *
+     **
+     * Indicates that the column can be used in a WHERE clause using any operator.
+     * 
+ * + * SEARCHABLE_FULL = 3; + */ + public static final int SEARCHABLE_FULL_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static Searchable valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static Searchable forNumber(int value) { + switch (value) { + case 0: return SEARCHABLE_NONE; + case 1: return SEARCHABLE_CHAR; + case 2: return SEARCHABLE_BASIC; + case 3: return SEARCHABLE_FULL; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + Searchable> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Searchable findValueByNumber(int number) { + return Searchable.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(20); + } + + private static final Searchable[] VALUES = values(); + 
+ public static Searchable valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private Searchable(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.Searchable) + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.UpdateDeleteRules} + */ + public enum UpdateDeleteRules + implements com.google.protobuf.ProtocolMessageEnum { + /** + * CASCADE = 0; + */ + CASCADE(0), + /** + * RESTRICT = 1; + */ + RESTRICT(1), + /** + * SET_NULL = 2; + */ + SET_NULL(2), + /** + * NO_ACTION = 3; + */ + NO_ACTION(3), + /** + * SET_DEFAULT = 4; + */ + SET_DEFAULT(4), + UNRECOGNIZED(-1), + ; + + /** + * CASCADE = 0; + */ + public static final int CASCADE_VALUE = 0; + /** + * RESTRICT = 1; + */ + public static final int RESTRICT_VALUE = 1; + /** + * SET_NULL = 2; + */ + public static final int SET_NULL_VALUE = 2; + /** + * NO_ACTION = 3; + */ + public static final int NO_ACTION_VALUE = 3; + /** + * SET_DEFAULT = 4; + */ + public static final int SET_DEFAULT_VALUE = 4; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static UpdateDeleteRules valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static UpdateDeleteRules forNumber(int value) { + switch (value) { + case 0: return CASCADE; + case 1: return RESTRICT; + case 2: return SET_NULL; + case 3: return NO_ACTION; + case 4: return SET_DEFAULT; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + UpdateDeleteRules> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public UpdateDeleteRules findValueByNumber(int number) { + return UpdateDeleteRules.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.getDescriptor().getEnumTypes().get(21); + } + + private static final UpdateDeleteRules[] VALUES = values(); + + public static UpdateDeleteRules valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private UpdateDeleteRules(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.UpdateDeleteRules) + } + + public interface CommandGetSqlInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandGetSqlInfo) + 
com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+     * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+     * More information types can be added in future releases.
+     * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+     *
+     * Note that the set of metadata may expand.
+     *
+     * Initially, Flight SQL will support the following information types:
+     * - Server Information - Range [0-500)
+     * - Syntax Information - Range [500-1000)
+     * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+     * Custom options should start at 10,000.
+     *
+     * If omitted, then all metadata will be retrieved.
+     * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+     * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+     * If additional metadata is included, the metadata IDs should start from 10,000.
+     * 
+ * + * repeated uint32 info = 1; + * @return A list containing the info. + */ + java.util.List getInfoList(); + /** + *
+     *
+     * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+     * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+     * More information types can be added in future releases.
+     * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+     *
+     * Note that the set of metadata may expand.
+     *
+     * Initially, Flight SQL will support the following information types:
+     * - Server Information - Range [0-500)
+     * - Syntax Information - Range [500-1000)
+     * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+     * Custom options should start at 10,000.
+     *
+     * If omitted, then all metadata will be retrieved.
+     * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+     * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+     * If additional metadata is included, the metadata IDs should start from 10,000.
+     * 
+ * + * repeated uint32 info = 1; + * @return The count of info. + */ + int getInfoCount(); + /** + *
+     *
+     * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+     * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+     * More information types can be added in future releases.
+     * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+     *
+     * Note that the set of metadata may expand.
+     *
+     * Initially, Flight SQL will support the following information types:
+     * - Server Information - Range [0-500)
+     * - Syntax Information - Range [500-1000)
+     * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+     * Custom options should start at 10,000.
+     *
+     * If omitted, then all metadata will be retrieved.
+     * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+     * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+     * If additional metadata is included, the metadata IDs should start from 10,000.
+     * 
+ * + * repeated uint32 info = 1; + * @param index The index of the element to return. + * @return The info at the given index. + */ + int getInfo(int index); + } + /** + *
+   *
+   * Represents a metadata request. Used in the command member of FlightDescriptor
+   * for the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *  - GetFlightInfo: execute the metadata request.
+   *
+   * The returned Arrow schema will be:
+   * <
+   *  info_name: uint32 not null,
+   *  value: dense_union<
+   *              string_value: utf8,
+   *              bool_value: bool,
+   *              bigint_value: int64,
+   *              int32_bitmask: int32,
+   *              string_list: list<string_data: utf8>
+   *              int32_to_int32_list_map: map<key: int32, value: list<$data$: int32>>
+   * >
+   * where there is one row per requested piece of metadata information.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetSqlInfo} + */ + public static final class CommandGetSqlInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandGetSqlInfo) + CommandGetSqlInfoOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandGetSqlInfo.newBuilder() to construct. + private CommandGetSqlInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandGetSqlInfo() { + info_ = emptyIntList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandGetSqlInfo(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetSqlInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetSqlInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo.Builder.class); + } + + public static final int INFO_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private com.google.protobuf.Internal.IntList info_; + /** + *
+     *
+     * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+     * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+     * More information types can be added in future releases.
+     * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+     *
+     * Note that the set of metadata may expand.
+     *
+     * Initially, Flight SQL will support the following information types:
+     * - Server Information - Range [0-500)
+     * - Syntax Information - Range [500-1000)
+     * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+     * Custom options should start at 10,000.
+     *
+     * If omitted, then all metadata will be retrieved.
+     * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+     * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+     * If additional metadata is included, the metadata IDs should start from 10,000.
+     * 
+ * + * repeated uint32 info = 1; + * @return A list containing the info. + */ + @java.lang.Override + public java.util.List + getInfoList() { + return info_; + } + /** + *
+     *
+     * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+     * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+     * More information types can be added in future releases.
+     * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+     *
+     * Note that the set of metadata may expand.
+     *
+     * Initially, Flight SQL will support the following information types:
+     * - Server Information - Range [0-500)
+     * - Syntax Information - Range [500-1000)
+     * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+     * Custom options should start at 10,000.
+     *
+     * If omitted, then all metadata will be retrieved.
+     * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+     * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+     * If additional metadata is included, the metadata IDs should start from 10,000.
+     * 
+ * + * repeated uint32 info = 1; + * @return The count of info. + */ + public int getInfoCount() { + return info_.size(); + } + /** + *
+     *
+     * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+     * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+     * More information types can be added in future releases.
+     * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+     *
+     * Note that the set of metadata may expand.
+     *
+     * Initially, Flight SQL will support the following information types:
+     * - Server Information - Range [0-500)
+     * - Syntax Information - Range [500-1000)
+     * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+     * Custom options should start at 10,000.
+     *
+     * If omitted, then all metadata will be retrieved.
+     * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+     * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+     * If additional metadata is included, the metadata IDs should start from 10,000.
+     * 
+ * + * repeated uint32 info = 1; + * @param index The index of the element to return. + * @return The info at the given index. + */ + public int getInfo(int index) { + return info_.getInt(index); + } + private int infoMemoizedSerializedSize = -1; + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (getInfoList().size() > 0) { + output.writeUInt32NoTag(10); + output.writeUInt32NoTag(infoMemoizedSerializedSize); + } + for (int i = 0; i < info_.size(); i++) { + output.writeUInt32NoTag(info_.getInt(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < info_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeUInt32SizeNoTag(info_.getInt(i)); + } + size += dataSize; + if (!getInfoList().isEmpty()) { + size += 1; + size += com.google.protobuf.CodedOutputStream + .computeInt32SizeNoTag(dataSize); + } + infoMemoizedSerializedSize = dataSize; + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo) obj; + + if (!getInfoList() + .equals(other.getInfoList())) return false; + if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getInfoCount() > 0) { + hash = (37 * hash) + INFO_FIELD_NUMBER; + hash = (53 * hash) + getInfoList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, 
extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder 
newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a metadata request. Used in the command member of FlightDescriptor
+     * for the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *  - GetFlightInfo: execute the metadata request.
+     *
+     * The returned Arrow schema will be:
+     * <
+     *  info_name: uint32 not null,
+     *  value: dense_union<
+     *              string_value: utf8,
+     *              bool_value: bool,
+     *              bigint_value: int64,
+     *              int32_bitmask: int32,
+     *              string_list: list<string_data: utf8>
+     *              int32_to_int32_list_map: map<key: int32, value: list<$data$: int32>>
+     * >
+     * where there is one row per requested piece of metadata information.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetSqlInfo} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandGetSqlInfo) + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetSqlInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetSqlInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + info_ = emptyIntList(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetSqlInfo_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo result 
= buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo result) { + if (((bitField0_ & 0x00000001) != 0)) { + info_.makeImmutable(); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.info_ = info_; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo.getDefaultInstance()) return this; + if (!other.info_.isEmpty()) { + if (info_.isEmpty()) { + info_ = other.info_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureInfoIsMutable(); + info_.addAll(other.info_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + int v = input.readUInt32(); + ensureInfoIsMutable(); + info_.addInt(v); + break; + } // case 8 + case 10: { + int length = input.readRawVarint32(); + int limit = input.pushLimit(length); + ensureInfoIsMutable(); + while (input.getBytesUntilLimit() > 0) { + info_.addInt(input.readUInt32()); + } + input.popLimit(limit); + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.Internal.IntList info_ = emptyIntList(); + private void ensureInfoIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + info_ = mutableCopy(info_); + bitField0_ |= 0x00000001; + } + } + /** + *
+       *
+       * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+       * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+       * More information types can be added in future releases.
+       * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+       *
+       * Note that the set of metadata may expand.
+       *
+       * Initially, Flight SQL will support the following information types:
+       * - Server Information - Range [0-500)
+       * - Syntax Information - Range [500-1000)
+       * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+       * Custom options should start at 10,000.
+       *
+       * If omitted, then all metadata will be retrieved.
+       * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+       * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+       * If additional metadata is included, the metadata IDs should start from 10,000.
+       * 
+ * + * repeated uint32 info = 1; + * @return A list containing the info. + */ + public java.util.List + getInfoList() { + return ((bitField0_ & 0x00000001) != 0) ? + java.util.Collections.unmodifiableList(info_) : info_; + } + /** + *
+       *
+       * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+       * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+       * More information types can be added in future releases.
+       * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+       *
+       * Note that the set of metadata may expand.
+       *
+       * Initially, Flight SQL will support the following information types:
+       * - Server Information - Range [0-500)
+       * - Syntax Information - Range [500-1000)
+       * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+       * Custom options should start at 10,000.
+       *
+       * If omitted, then all metadata will be retrieved.
+       * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+       * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+       * If additional metadata is included, the metadata IDs should start from 10,000.
+       * 
+ * + * repeated uint32 info = 1; + * @return The count of info. + */ + public int getInfoCount() { + return info_.size(); + } + /** + *
+       *
+       * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+       * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+       * More information types can be added in future releases.
+       * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+       *
+       * Note that the set of metadata may expand.
+       *
+       * Initially, Flight SQL will support the following information types:
+       * - Server Information - Range [0-500)
+       * - Syntax Information - Range [500-1000)
+       * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+       * Custom options should start at 10,000.
+       *
+       * If omitted, then all metadata will be retrieved.
+       * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+       * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+       * If additional metadata is included, the metadata IDs should start from 10,000.
+       * 
+ * + * repeated uint32 info = 1; + * @param index The index of the element to return. + * @return The info at the given index. + */ + public int getInfo(int index) { + return info_.getInt(index); + } + /** + *
+       *
+       * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+       * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+       * More information types can be added in future releases.
+       * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+       *
+       * Note that the set of metadata may expand.
+       *
+       * Initially, Flight SQL will support the following information types:
+       * - Server Information - Range [0-500)
+       * - Syntax Information - Range [500-1000)
+       * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+       * Custom options should start at 10,000.
+       *
+       * If omitted, then all metadata will be retrieved.
+       * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+       * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+       * If additional metadata is included, the metadata IDs should start from 10,000.
+       * 
+ * + * repeated uint32 info = 1; + * @param index The index to set the value at. + * @param value The info to set. + * @return This builder for chaining. + */ + public Builder setInfo( + int index, int value) { + + ensureInfoIsMutable(); + info_.setInt(index, value); + onChanged(); + return this; + } + /** + *
+       *
+       * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+       * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+       * More information types can be added in future releases.
+       * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+       *
+       * Note that the set of metadata may expand.
+       *
+       * Initially, Flight SQL will support the following information types:
+       * - Server Information - Range [0-500)
+       * - Syntax Information - Range [500-1000)
+       * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+       * Custom options should start at 10,000.
+       *
+       * If omitted, then all metadata will be retrieved.
+       * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+       * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+       * If additional metadata is included, the metadata IDs should start from 10,000.
+       * 
+ * + * repeated uint32 info = 1; + * @param value The info to add. + * @return This builder for chaining. + */ + public Builder addInfo(int value) { + + ensureInfoIsMutable(); + info_.addInt(value); + onChanged(); + return this; + } + /** + *
+       *
+       * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+       * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+       * More information types can be added in future releases.
+       * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+       *
+       * Note that the set of metadata may expand.
+       *
+       * Initially, Flight SQL will support the following information types:
+       * - Server Information - Range [0-500)
+       * - Syntax Information - Range [500-1000)
+       * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+       * Custom options should start at 10,000.
+       *
+       * If omitted, then all metadata will be retrieved.
+       * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+       * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+       * If additional metadata is included, the metadata IDs should start from 10,000.
+       * 
+ * + * repeated uint32 info = 1; + * @param values The info to add. + * @return This builder for chaining. + */ + public Builder addAllInfo( + java.lang.Iterable values) { + ensureInfoIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, info_); + onChanged(); + return this; + } + /** + *
+       *
+       * Values are modelled after ODBC's SQLGetInfo() function. This information is intended to provide
+       * Flight SQL clients with basic, SQL syntax and SQL functions related information.
+       * More information types can be added in future releases.
+       * E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
+       *
+       * Note that the set of metadata may expand.
+       *
+       * Initially, Flight SQL will support the following information types:
+       * - Server Information - Range [0-500)
+       * - Syntax Information - Range [500-1000)
+       * Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
+       * Custom options should start at 10,000.
+       *
+       * If omitted, then all metadata will be retrieved.
+       * Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must
+       * at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use.
+       * If additional metadata is included, the metadata IDs should start from 10,000.
+       * 
+ * + * repeated uint32 info = 1; + * @return This builder for chaining. + */ + public Builder clearInfo() { + info_ = emptyIntList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandGetSqlInfo) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandGetSqlInfo) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandGetSqlInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } 
+ }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetSqlInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandGetXdbcTypeInfoOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandGetXdbcTypeInfo) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * Specifies the data type to search for the info.
+     * 
+ * + * optional int32 data_type = 1; + * @return Whether the dataType field is set. + */ + boolean hasDataType(); + /** + *
+     *
+     * Specifies the data type to search for the info.
+     * 
+ * + * optional int32 data_type = 1; + * @return The dataType. + */ + int getDataType(); + } + /** + *
+   *
+   * Represents a request to retrieve information about data type supported on a Flight SQL enabled backend.
+   * Used in the command member of FlightDescriptor for the following RPC calls:
+   *  - GetSchema: return the schema of the query.
+   *  - GetFlightInfo: execute the catalog metadata request.
+   *
+   * The returned schema will be:
+   * <
+   *   type_name: utf8 not null (The name of the data type, for example: VARCHAR, INTEGER, etc),
+   *   data_type: int32 not null (The SQL data type),
+   *   column_size: int32 (The maximum size supported by that column.
+   *                       In case of exact numeric types, this represents the maximum precision.
+   *                       In case of string types, this represents the character length.
+   *                       In case of datetime data types, this represents the length in characters of the string representation.
+   *                       NULL is returned for data types where column size is not applicable.),
+   *   literal_prefix: utf8 (Character or characters used to prefix a literal, NULL is returned for
+   *                         data types where a literal prefix is not applicable.),
+   *   literal_suffix: utf8 (Character or characters used to terminate a literal,
+   *                         NULL is returned for data types where a literal suffix is not applicable.),
+   *   create_params: list<utf8 not null>
+   *                        (A list of keywords corresponding to which parameters can be used when creating
+   *                         a column for that specific type.
+   *                         NULL is returned if there are no parameters for the data type definition.),
+   *   nullable: int32 not null (Shows if the data type accepts a NULL value. The possible values can be seen in the
+   *                             Nullable enum.),
+   *   case_sensitive: bool not null (Shows if a character data type is case-sensitive in collations and comparisons),
+   *   searchable: int32 not null (Shows how the data type is used in a WHERE clause. The possible values can be seen in the
+   *                               Searchable enum.),
+   *   unsigned_attribute: bool (Shows if the data type is unsigned. NULL is returned if the attribute is
+   *                             not applicable to the data type or the data type is not numeric.),
+   *   fixed_prec_scale: bool not null (Shows if the data type has predefined fixed precision and scale.),
+   *   auto_increment: bool (Shows if the data type is auto incremental. NULL is returned if the attribute
+   *                         is not applicable to the data type or the data type is not numeric.),
+   *   local_type_name: utf8 (Localized version of the data source-dependent name of the data type. NULL
+   *                          is returned if a localized name is not supported by the data source),
+   *   minimum_scale: int32 (The minimum scale of the data type on the data source.
+   *                         If a data type has a fixed scale, the MINIMUM_SCALE and MAXIMUM_SCALE
+   *                         columns both contain this value. NULL is returned if scale is not applicable.),
+   *   maximum_scale: int32 (The maximum scale of the data type on the data source.
+   *                         NULL is returned if scale is not applicable.),
+   *   sql_data_type: int32 not null (The value of the SQL DATA TYPE which has the same values
+   *                                  as data_type value. Except for interval and datetime, which
+   *                                  uses generic values. More info about those types can be
+   *                                  obtained through datetime_subcode. The possible values can be seen
+   *                                  in the XdbcDataType enum.),
+   *   datetime_subcode: int32 (Only used when the SQL DATA TYPE is interval or datetime. It contains
+   *                            its sub types. For type different from interval and datetime, this value
+   *                            is NULL. The possible values can be seen in the XdbcDatetimeSubcode enum.),
+   *   num_prec_radix: int32 (If the data type is an approximate numeric type, this column contains
+   *                          the value 2 to indicate that COLUMN_SIZE specifies a number of bits. For
+   *                          exact numeric types, this column contains the value 10 to indicate that
+   *                          column size specifies a number of decimal digits. Otherwise, this column is NULL.),
+   *   interval_precision: int32 (If the data type is an interval data type, then this column contains the value
+   *                              of the interval leading precision. Otherwise, this column is NULL. This fields
+   *                              is only relevant to be used by ODBC).
+   * >
+   * The returned data should be ordered by data_type and then by type_name.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetXdbcTypeInfo} + */ + public static final class CommandGetXdbcTypeInfo extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandGetXdbcTypeInfo) + CommandGetXdbcTypeInfoOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandGetXdbcTypeInfo.newBuilder() to construct. + private CommandGetXdbcTypeInfo(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandGetXdbcTypeInfo() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandGetXdbcTypeInfo(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetXdbcTypeInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetXdbcTypeInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo.Builder.class); + } + + private int bitField0_; + public static final int DATA_TYPE_FIELD_NUMBER = 1; + private int dataType_ = 0; + /** + *
+     *
+     * Specifies the data type to search for the info.
+     * 
+ * + * optional int32 data_type = 1; + * @return Whether the dataType field is set. + */ + @java.lang.Override + public boolean hasDataType() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     *
+     * Specifies the data type to search for the info.
+     * 
+ * + * optional int32 data_type = 1; + * @return The dataType. + */ + @java.lang.Override + public int getDataType() { + return dataType_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeInt32(1, dataType_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, dataType_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo) obj; + + if (hasDataType() != other.hasDataType()) return false; + if (hasDataType()) { + if (getDataType() + != other.getDataType()) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasDataType()) { + hash = (37 * hash) + DATA_TYPE_FIELD_NUMBER; + hash = (53 * hash) + getDataType(); + } + hash = (29 * hash) + 
getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseFrom( + java.io.InputStream 
input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a request to retrieve information about data type supported on a Flight SQL enabled backend.
+     * Used in the command member of FlightDescriptor for the following RPC calls:
+     *  - GetSchema: return the schema of the query.
+     *  - GetFlightInfo: execute the catalog metadata request.
+     *
+     * The returned schema will be:
+     * <
+     *   type_name: utf8 not null (The name of the data type, for example: VARCHAR, INTEGER, etc),
+     *   data_type: int32 not null (The SQL data type),
+     *   column_size: int32 (The maximum size supported by that column.
+     *                       In case of exact numeric types, this represents the maximum precision.
+     *                       In case of string types, this represents the character length.
+     *                       In case of datetime data types, this represents the length in characters of the string representation.
+     *                       NULL is returned for data types where column size is not applicable.),
+     *   literal_prefix: utf8 (Character or characters used to prefix a literal, NULL is returned for
+     *                         data types where a literal prefix is not applicable.),
+     *   literal_suffix: utf8 (Character or characters used to terminate a literal,
+     *                         NULL is returned for data types where a literal suffix is not applicable.),
+     *   create_params: list<utf8 not null>
+     *                        (A list of keywords corresponding to which parameters can be used when creating
+     *                         a column for that specific type.
+     *                         NULL is returned if there are no parameters for the data type definition.),
+     *   nullable: int32 not null (Shows if the data type accepts a NULL value. The possible values can be seen in the
+     *                             Nullable enum.),
+     *   case_sensitive: bool not null (Shows if a character data type is case-sensitive in collations and comparisons),
+     *   searchable: int32 not null (Shows how the data type is used in a WHERE clause. The possible values can be seen in the
+     *                               Searchable enum.),
+     *   unsigned_attribute: bool (Shows if the data type is unsigned. NULL is returned if the attribute is
+     *                             not applicable to the data type or the data type is not numeric.),
+     *   fixed_prec_scale: bool not null (Shows if the data type has predefined fixed precision and scale.),
+     *   auto_increment: bool (Shows if the data type is auto incremental. NULL is returned if the attribute
+     *                         is not applicable to the data type or the data type is not numeric.),
+     *   local_type_name: utf8 (Localized version of the data source-dependent name of the data type. NULL
+     *                          is returned if a localized name is not supported by the data source),
+     *   minimum_scale: int32 (The minimum scale of the data type on the data source.
+     *                         If a data type has a fixed scale, the MINIMUM_SCALE and MAXIMUM_SCALE
+     *                         columns both contain this value. NULL is returned if scale is not applicable.),
+     *   maximum_scale: int32 (The maximum scale of the data type on the data source.
+     *                         NULL is returned if scale is not applicable.),
+     *   sql_data_type: int32 not null (The value of the SQL DATA TYPE which has the same values
+     *                                  as data_type value. Except for interval and datetime, which
+     *                                  uses generic values. More info about those types can be
+     *                                  obtained through datetime_subcode. The possible values can be seen
+     *                                  in the XdbcDataType enum.),
+     *   datetime_subcode: int32 (Only used when the SQL DATA TYPE is interval or datetime. It contains
+     *                            its sub types. For type different from interval and datetime, this value
+     *                            is NULL. The possible values can be seen in the XdbcDatetimeSubcode enum.),
+     *   num_prec_radix: int32 (If the data type is an approximate numeric type, this column contains
+     *                          the value 2 to indicate that COLUMN_SIZE specifies a number of bits. For
+     *                          exact numeric types, this column contains the value 10 to indicate that
+     *                          column size specifies a number of decimal digits. Otherwise, this column is NULL.),
+     *   interval_precision: int32 (If the data type is an interval data type, then this column contains the value
+     *                              of the interval leading precision. Otherwise, this column is NULL. This fields
+     *                              is only relevant to be used by ODBC).
+     * >
+     * The returned data should be ordered by data_type and then by type_name.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetXdbcTypeInfo} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandGetXdbcTypeInfo) + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetXdbcTypeInfo_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetXdbcTypeInfo_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + dataType_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetXdbcTypeInfo_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo build() { + 
org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.dataType_ = dataType_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo.getDefaultInstance()) return this; + if (other.hasDataType()) { + setDataType(other.getDataType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + 
int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + dataType_ = input.readInt32(); + bitField0_ |= 0x00000001; + break; + } // case 8 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int dataType_ ; + /** + *
+       *
+       * Specifies the data type to search for the info.
+       * 
+ * + * optional int32 data_type = 1; + * @return Whether the dataType field is set. + */ + @java.lang.Override + public boolean hasDataType() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       *
+       * Specifies the data type to search for the info.
+       * 
+ * + * optional int32 data_type = 1; + * @return The dataType. + */ + @java.lang.Override + public int getDataType() { + return dataType_; + } + /** + *
+       *
+       * Specifies the data type to search for the info.
+       * 
+ * + * optional int32 data_type = 1; + * @param value The dataType to set. + * @return This builder for chaining. + */ + public Builder setDataType(int value) { + + dataType_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the data type to search for the info.
+       * 
+ * + * optional int32 data_type = 1; + * @return This builder for chaining. + */ + public Builder clearDataType() { + bitField0_ = (bitField0_ & ~0x00000001); + dataType_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandGetXdbcTypeInfo) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandGetXdbcTypeInfo) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandGetXdbcTypeInfo parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + 
return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetXdbcTypeInfo getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandGetCatalogsOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandGetCatalogs) + com.google.protobuf.MessageOrBuilder { + } + /** + *
+   *
+   * Represents a request to retrieve the list of catalogs on a Flight SQL enabled backend.
+   * The definition of a catalog depends on vendor/implementation. It is usually the database itself.
+   * Used in the command member of FlightDescriptor for the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *  - GetFlightInfo: execute the catalog metadata request.
+   *
+   * The returned Arrow schema will be:
+   * <
+   *  catalog_name: utf8 not null
+   * >
+   * The returned data should be ordered by catalog_name.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetCatalogs} + */ + public static final class CommandGetCatalogs extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandGetCatalogs) + CommandGetCatalogsOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandGetCatalogs.newBuilder() to construct. + private CommandGetCatalogs(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandGetCatalogs() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandGetCatalogs(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetCatalogs_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetCatalogs_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs.Builder.class); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + size += 
getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs) obj; + + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseFrom(byte[] data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a request to retrieve the list of catalogs on a Flight SQL enabled backend.
+     * The definition of a catalog depends on vendor/implementation. It is usually the database itself.
+     * Used in the command member of FlightDescriptor for the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *  - GetFlightInfo: execute the catalog metadata request.
+     *
+     * The returned Arrow schema will be:
+     * <
+     *  catalog_name: utf8 not null
+     * >
+     * The returned data should be ordered by catalog_name.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetCatalogs} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandGetCatalogs) + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetCatalogs_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetCatalogs_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetCatalogs_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs result = buildPartial(); + if 
(!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs(this); + onBuilt(); + return result; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return 
super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandGetCatalogs) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandGetCatalogs) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandGetCatalogs parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCatalogs getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + 
} + + } + + public interface CommandGetDbSchemasOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandGetDbSchemas) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted, the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + boolean hasCatalog(); + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted, the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + java.lang.String getCatalog(); + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted, the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + com.google.protobuf.ByteString + getCatalogBytes(); + + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return Whether the dbSchemaFilterPattern field is set. + */ + boolean hasDbSchemaFilterPattern(); + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The dbSchemaFilterPattern. + */ + java.lang.String getDbSchemaFilterPattern(); + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The bytes for dbSchemaFilterPattern. + */ + com.google.protobuf.ByteString + getDbSchemaFilterPatternBytes(); + } + /** + *
+   *
+   * Represents a request to retrieve the list of database schemas on a Flight SQL enabled backend.
+   * The definition of a database schema depends on vendor/implementation. It is usually a collection of tables.
+   * Used in the command member of FlightDescriptor for the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *  - GetFlightInfo: execute the catalog metadata request.
+   *
+   * The returned Arrow schema will be:
+   * <
+   *  catalog_name: utf8,
+   *  db_schema_name: utf8 not null
+   * >
+   * The returned data should be ordered by catalog_name, then db_schema_name.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetDbSchemas} + */ + public static final class CommandGetDbSchemas extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandGetDbSchemas) + CommandGetDbSchemasOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandGetDbSchemas.newBuilder() to construct. + private CommandGetDbSchemas(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandGetDbSchemas() { + catalog_ = ""; + dbSchemaFilterPattern_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandGetDbSchemas(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetDbSchemas_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetDbSchemas_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas.Builder.class); + } + + private int bitField0_; + public static final int CATALOG_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object catalog_ = ""; + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted, the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + @java.lang.Override + public boolean hasCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted, the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + @java.lang.Override + public java.lang.String getCatalog() { + java.lang.Object ref = catalog_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalog_ = s; + return s; + } + } + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted, the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getCatalogBytes() { + java.lang.Object ref = catalog_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + catalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DB_SCHEMA_FILTER_PATTERN_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object dbSchemaFilterPattern_ = ""; + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return Whether the dbSchemaFilterPattern field is set. + */ + @java.lang.Override + public boolean hasDbSchemaFilterPattern() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The dbSchemaFilterPattern. + */ + @java.lang.Override + public java.lang.String getDbSchemaFilterPattern() { + java.lang.Object ref = dbSchemaFilterPattern_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + dbSchemaFilterPattern_ = s; + return s; + } + } + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The bytes for dbSchemaFilterPattern. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getDbSchemaFilterPatternBytes() { + java.lang.Object ref = dbSchemaFilterPattern_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + dbSchemaFilterPattern_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, catalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, dbSchemaFilterPattern_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, catalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, dbSchemaFilterPattern_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas)) { + return super.equals(obj); + } + 
org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas) obj; + + if (hasCatalog() != other.hasCatalog()) return false; + if (hasCatalog()) { + if (!getCatalog() + .equals(other.getCatalog())) return false; + } + if (hasDbSchemaFilterPattern() != other.hasDbSchemaFilterPattern()) return false; + if (hasDbSchemaFilterPattern()) { + if (!getDbSchemaFilterPattern() + .equals(other.getDbSchemaFilterPattern())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasCatalog()) { + hash = (37 * hash) + CATALOG_FIELD_NUMBER; + hash = (53 * hash) + getCatalog().hashCode(); + } + if (hasDbSchemaFilterPattern()) { + hash = (37 * hash) + DB_SCHEMA_FILTER_PATTERN_FIELD_NUMBER; + hash = (53 * hash) + getDbSchemaFilterPattern().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseFrom( 
+ com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseFrom( + 
com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a request to retrieve the list of database schemas on a Flight SQL enabled backend.
+     * The definition of a database schema depends on vendor/implementation. It is usually a collection of tables.
+     * Used in the command member of FlightDescriptor for the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *  - GetFlightInfo: execute the catalog metadata request.
+     *
+     * The returned Arrow schema will be:
+     * <
+     *  catalog_name: utf8,
+     *  db_schema_name: utf8 not null
+     * >
+     * The returned data should be ordered by catalog_name, then db_schema_name.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetDbSchemas} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandGetDbSchemas) + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemasOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetDbSchemas_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetDbSchemas_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + catalog_ = ""; + dbSchemaFilterPattern_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetDbSchemas_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas build() { + 
org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.catalog_ = catalog_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.dbSchemaFilterPattern_ = dbSchemaFilterPattern_; + to_bitField0_ |= 0x00000002; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas.getDefaultInstance()) return this; + if (other.hasCatalog()) { + catalog_ = other.catalog_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasDbSchemaFilterPattern()) { + dbSchemaFilterPattern_ = other.dbSchemaFilterPattern_; + bitField0_ |= 0x00000002; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public 
Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + catalog_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + dbSchemaFilterPattern_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object catalog_ = ""; + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + public boolean hasCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + public java.lang.String getCatalog() { + java.lang.Object ref = catalog_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalog_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + public com.google.protobuf.ByteString + getCatalogBytes() { + java.lang.Object ref = catalog_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + catalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @param value The catalog to set. + * @return This builder for chaining. + */ + public Builder setCatalog( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + catalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return This builder for chaining. + */ + public Builder clearCatalog() { + catalog_ = getDefaultInstance().getCatalog(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @param value The bytes for catalog to set. + * @return This builder for chaining. + */ + public Builder setCatalogBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + catalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object dbSchemaFilterPattern_ = ""; + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return Whether the dbSchemaFilterPattern field is set. + */ + public boolean hasDbSchemaFilterPattern() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The dbSchemaFilterPattern. + */ + public java.lang.String getDbSchemaFilterPattern() { + java.lang.Object ref = dbSchemaFilterPattern_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + dbSchemaFilterPattern_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The bytes for dbSchemaFilterPattern. + */ + public com.google.protobuf.ByteString + getDbSchemaFilterPatternBytes() { + java.lang.Object ref = dbSchemaFilterPattern_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + dbSchemaFilterPattern_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @param value The dbSchemaFilterPattern to set. + * @return This builder for chaining. + */ + public Builder setDbSchemaFilterPattern( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + dbSchemaFilterPattern_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return This builder for chaining. + */ + public Builder clearDbSchemaFilterPattern() { + dbSchemaFilterPattern_ = getDefaultInstance().getDbSchemaFilterPattern(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @param value The bytes for dbSchemaFilterPattern to set. + * @return This builder for chaining. + */ + public Builder setDbSchemaFilterPatternBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + dbSchemaFilterPattern_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandGetDbSchemas) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandGetDbSchemas) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandGetDbSchemas parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetDbSchemas getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandGetTablesOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandGetTables) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + boolean hasCatalog(); + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + java.lang.String getCatalog(); + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + com.google.protobuf.ByteString + getCatalogBytes(); + + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return Whether the dbSchemaFilterPattern field is set. + */ + boolean hasDbSchemaFilterPattern(); + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The dbSchemaFilterPattern. + */ + java.lang.String getDbSchemaFilterPattern(); + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The bytes for dbSchemaFilterPattern. + */ + com.google.protobuf.ByteString + getDbSchemaFilterPatternBytes(); + + /** + *
+     *
+     * Specifies a filter pattern for tables to search for.
+     * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string table_name_filter_pattern = 3; + * @return Whether the tableNameFilterPattern field is set. + */ + boolean hasTableNameFilterPattern(); + /** + *
+     *
+     * Specifies a filter pattern for tables to search for.
+     * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string table_name_filter_pattern = 3; + * @return The tableNameFilterPattern. + */ + java.lang.String getTableNameFilterPattern(); + /** + *
+     *
+     * Specifies a filter pattern for tables to search for.
+     * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string table_name_filter_pattern = 3; + * @return The bytes for tableNameFilterPattern. + */ + com.google.protobuf.ByteString + getTableNameFilterPatternBytes(); + + /** + *
+     *
+     * Specifies a filter of table types which must match.
+     * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+     * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+     * 
+ * + * repeated string table_types = 4; + * @return A list containing the tableTypes. + */ + java.util.List + getTableTypesList(); + /** + *
+     *
+     * Specifies a filter of table types which must match.
+     * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+     * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+     * 
+ * + * repeated string table_types = 4; + * @return The count of tableTypes. + */ + int getTableTypesCount(); + /** + *
+     *
+     * Specifies a filter of table types which must match.
+     * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+     * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+     * 
+ * + * repeated string table_types = 4; + * @param index The index of the element to return. + * @return The tableTypes at the given index. + */ + java.lang.String getTableTypes(int index); + /** + *
+     *
+     * Specifies a filter of table types which must match.
+     * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+     * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+     * 
+ * + * repeated string table_types = 4; + * @param index The index of the value to return. + * @return The bytes of the tableTypes at the given index. + */ + com.google.protobuf.ByteString + getTableTypesBytes(int index); + + /** + *
+     * Specifies if the Arrow schema should be returned for found tables.
+     * 
+ * + * bool include_schema = 5; + * @return The includeSchema. + */ + boolean getIncludeSchema(); + } + /** + *
+   *
+   * Represents a request to retrieve the list of tables, and optionally their schemas, on a Flight SQL enabled backend.
+   * Used in the command member of FlightDescriptor for the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *  - GetFlightInfo: execute the catalog metadata request.
+   *
+   * The returned Arrow schema will be:
+   * <
+   *  catalog_name: utf8,
+   *  db_schema_name: utf8,
+   *  table_name: utf8 not null,
+   *  table_type: utf8 not null,
+   *  [optional] table_schema: bytes not null (schema of the table as described in Schema.fbs::Schema,
+   *                                           it is serialized as an IPC message.)
+   * >
+   * Fields on table_schema may contain the following metadata:
+   *  - ARROW:FLIGHT:SQL:CATALOG_NAME      - Table's catalog name
+   *  - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME    - Database schema name
+   *  - ARROW:FLIGHT:SQL:TABLE_NAME        - Table name
+   *  - ARROW:FLIGHT:SQL:TYPE_NAME         - The data source-specific name for the data type of the column.
+   *  - ARROW:FLIGHT:SQL:PRECISION         - Column precision/size
+   *  - ARROW:FLIGHT:SQL:SCALE             - Column scale/decimal digits if applicable
+   *  - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise.
+   *  - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise.
+   *  - ARROW:FLIGHT:SQL:IS_READ_ONLY      - "1" indicates if the column is read only, "0" otherwise.
+   *  - ARROW:FLIGHT:SQL:IS_SEARCHABLE     - "1" indicates if the column is searchable via WHERE clause, "0" otherwise.
+   * The returned data should be ordered by catalog_name, db_schema_name, table_name, then table_type, followed by table_schema if requested.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetTables} + */ + public static final class CommandGetTables extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandGetTables) + CommandGetTablesOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandGetTables.newBuilder() to construct. + private CommandGetTables(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandGetTables() { + catalog_ = ""; + dbSchemaFilterPattern_ = ""; + tableNameFilterPattern_ = ""; + tableTypes_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandGetTables(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetTables_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetTables_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables.Builder.class); + } + + private int bitField0_; + public static final int CATALOG_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object catalog_ = ""; + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + @java.lang.Override + public boolean hasCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + @java.lang.Override + public java.lang.String getCatalog() { + java.lang.Object ref = catalog_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalog_ = s; + return s; + } + } + /** + *
+     *
+     * Specifies the Catalog to search for the tables.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getCatalogBytes() { + java.lang.Object ref = catalog_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + catalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DB_SCHEMA_FILTER_PATTERN_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object dbSchemaFilterPattern_ = ""; + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return Whether the dbSchemaFilterPattern field is set. + */ + @java.lang.Override + public boolean hasDbSchemaFilterPattern() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The dbSchemaFilterPattern. + */ + @java.lang.Override + public java.lang.String getDbSchemaFilterPattern() { + java.lang.Object ref = dbSchemaFilterPattern_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + dbSchemaFilterPattern_ = s; + return s; + } + } + /** + *
+     *
+     * Specifies a filter pattern for schemas to search for.
+     * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The bytes for dbSchemaFilterPattern. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getDbSchemaFilterPatternBytes() { + java.lang.Object ref = dbSchemaFilterPattern_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + dbSchemaFilterPattern_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TABLE_NAME_FILTER_PATTERN_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object tableNameFilterPattern_ = ""; + /** + *
+     *
+     * Specifies a filter pattern for tables to search for.
+     * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string table_name_filter_pattern = 3; + * @return Whether the tableNameFilterPattern field is set. + */ + @java.lang.Override + public boolean hasTableNameFilterPattern() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + *
+     *
+     * Specifies a filter pattern for tables to search for.
+     * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string table_name_filter_pattern = 3; + * @return The tableNameFilterPattern. + */ + @java.lang.Override + public java.lang.String getTableNameFilterPattern() { + java.lang.Object ref = tableNameFilterPattern_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + tableNameFilterPattern_ = s; + return s; + } + } + /** + *
+     *
+     * Specifies a filter pattern for tables to search for.
+     * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+     * In the pattern string, two special characters can be used to denote matching rules:
+     *    - "%" means to match any substring with 0 or more characters.
+     *    - "_" means to match any one character.
+     * 
+ * + * optional string table_name_filter_pattern = 3; + * @return The bytes for tableNameFilterPattern. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getTableNameFilterPatternBytes() { + java.lang.Object ref = tableNameFilterPattern_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + tableNameFilterPattern_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TABLE_TYPES_FIELD_NUMBER = 4; + @SuppressWarnings("serial") + private com.google.protobuf.LazyStringArrayList tableTypes_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + /** + *
+     *
+     * Specifies a filter of table types which must match.
+     * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+     * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+     * 
+ * + * repeated string table_types = 4; + * @return A list containing the tableTypes. + */ + public com.google.protobuf.ProtocolStringList + getTableTypesList() { + return tableTypes_; + } + /** + *
+     *
+     * Specifies a filter of table types which must match.
+     * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+     * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+     * 
+ * + * repeated string table_types = 4; + * @return The count of tableTypes. + */ + public int getTableTypesCount() { + return tableTypes_.size(); + } + /** + *
+     *
+     * Specifies a filter of table types which must match.
+     * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+     * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+     * 
+ * + * repeated string table_types = 4; + * @param index The index of the element to return. + * @return The tableTypes at the given index. + */ + public java.lang.String getTableTypes(int index) { + return tableTypes_.get(index); + } + /** + *
+     *
+     * Specifies a filter of table types which must match.
+     * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+     * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+     * 
+ * + * repeated string table_types = 4; + * @param index The index of the value to return. + * @return The bytes of the tableTypes at the given index. + */ + public com.google.protobuf.ByteString + getTableTypesBytes(int index) { + return tableTypes_.getByteString(index); + } + + public static final int INCLUDE_SCHEMA_FIELD_NUMBER = 5; + private boolean includeSchema_ = false; + /** + *
+     * Specifies if the Arrow schema should be returned for found tables.
+     * 
+ * + * bool include_schema = 5; + * @return The includeSchema. + */ + @java.lang.Override + public boolean getIncludeSchema() { + return includeSchema_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, catalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, dbSchemaFilterPattern_); + } + if (((bitField0_ & 0x00000004) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, tableNameFilterPattern_); + } + for (int i = 0; i < tableTypes_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, tableTypes_.getRaw(i)); + } + if (includeSchema_ != false) { + output.writeBool(5, includeSchema_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, catalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, dbSchemaFilterPattern_); + } + if (((bitField0_ & 0x00000004) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, tableNameFilterPattern_); + } + { + int dataSize = 0; + for (int i = 0; i < tableTypes_.size(); i++) { + dataSize += computeStringSizeNoTag(tableTypes_.getRaw(i)); + } + size += dataSize; + size += 1 * getTableTypesList().size(); + } + if (includeSchema_ != false) { + size 
+= com.google.protobuf.CodedOutputStream + .computeBoolSize(5, includeSchema_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables) obj; + + if (hasCatalog() != other.hasCatalog()) return false; + if (hasCatalog()) { + if (!getCatalog() + .equals(other.getCatalog())) return false; + } + if (hasDbSchemaFilterPattern() != other.hasDbSchemaFilterPattern()) return false; + if (hasDbSchemaFilterPattern()) { + if (!getDbSchemaFilterPattern() + .equals(other.getDbSchemaFilterPattern())) return false; + } + if (hasTableNameFilterPattern() != other.hasTableNameFilterPattern()) return false; + if (hasTableNameFilterPattern()) { + if (!getTableNameFilterPattern() + .equals(other.getTableNameFilterPattern())) return false; + } + if (!getTableTypesList() + .equals(other.getTableTypesList())) return false; + if (getIncludeSchema() + != other.getIncludeSchema()) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasCatalog()) { + hash = (37 * hash) + CATALOG_FIELD_NUMBER; + hash = (53 * hash) + getCatalog().hashCode(); + } + if (hasDbSchemaFilterPattern()) { + hash = (37 * hash) + DB_SCHEMA_FILTER_PATTERN_FIELD_NUMBER; + hash = (53 * hash) + getDbSchemaFilterPattern().hashCode(); + } + if (hasTableNameFilterPattern()) { + hash = (37 * hash) + TABLE_NAME_FILTER_PATTERN_FIELD_NUMBER; + hash = (53 * hash) + 
getTableNameFilterPattern().hashCode(); + } + if (getTableTypesCount() > 0) { + hash = (37 * hash) + TABLE_TYPES_FIELD_NUMBER; + hash = (53 * hash) + getTableTypesList().hashCode(); + } + hash = (37 * hash) + INCLUDE_SCHEMA_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getIncludeSchema()); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables 
prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a request to retrieve the list of tables, and optionally their schemas, on a Flight SQL enabled backend.
+     * Used in the command member of FlightDescriptor for the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *  - GetFlightInfo: execute the catalog metadata request.
+     *
+     * The returned Arrow schema will be:
+     * <
+     *  catalog_name: utf8,
+     *  db_schema_name: utf8,
+     *  table_name: utf8 not null,
+     *  table_type: utf8 not null,
+     *  [optional] table_schema: bytes not null (schema of the table as described in Schema.fbs::Schema,
+     *                                           it is serialized as an IPC message.)
+     * >
+     * Fields on table_schema may contain the following metadata:
+     *  - ARROW:FLIGHT:SQL:CATALOG_NAME      - Table's catalog name
+     *  - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME    - Database schema name
+     *  - ARROW:FLIGHT:SQL:TABLE_NAME        - Table name
+     *  - ARROW:FLIGHT:SQL:TYPE_NAME         - The data source-specific name for the data type of the column.
+     *  - ARROW:FLIGHT:SQL:PRECISION         - Column precision/size
+     *  - ARROW:FLIGHT:SQL:SCALE             - Column scale/decimal digits if applicable
+     *  - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise.
+     *  - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise.
+     *  - ARROW:FLIGHT:SQL:IS_READ_ONLY      - "1" indicates if the column is read only, "0" otherwise.
+     *  - ARROW:FLIGHT:SQL:IS_SEARCHABLE     - "1" indicates if the column is searchable via WHERE clause, "0" otherwise.
+     * The returned data should be ordered by catalog_name, db_schema_name, table_name, then table_type, followed by table_schema if requested.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetTables} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandGetTables) + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTablesOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetTables_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetTables_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + catalog_ = ""; + dbSchemaFilterPattern_ = ""; + tableNameFilterPattern_ = ""; + tableTypes_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + includeSchema_ = false; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetTables_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.catalog_ = catalog_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.dbSchemaFilterPattern_ = dbSchemaFilterPattern_; + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.tableNameFilterPattern_ = tableNameFilterPattern_; + to_bitField0_ |= 0x00000004; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + tableTypes_.makeImmutable(); + result.tableTypes_ = tableTypes_; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.includeSchema_ = includeSchema_; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables.getDefaultInstance()) return this; + if (other.hasCatalog()) { + catalog_ = other.catalog_; + 
bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasDbSchemaFilterPattern()) { + dbSchemaFilterPattern_ = other.dbSchemaFilterPattern_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (other.hasTableNameFilterPattern()) { + tableNameFilterPattern_ = other.tableNameFilterPattern_; + bitField0_ |= 0x00000004; + onChanged(); + } + if (!other.tableTypes_.isEmpty()) { + if (tableTypes_.isEmpty()) { + tableTypes_ = other.tableTypes_; + bitField0_ |= 0x00000008; + } else { + ensureTableTypesIsMutable(); + tableTypes_.addAll(other.tableTypes_); + } + onChanged(); + } + if (other.getIncludeSchema() != false) { + setIncludeSchema(other.getIncludeSchema()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + catalog_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + dbSchemaFilterPattern_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + tableNameFilterPattern_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + ensureTableTypesIsMutable(); + tableTypes_.add(s); + break; + } // case 34 + case 40: { + includeSchema_ = input.readBool(); + bitField0_ |= 0x00000010; + break; + } // case 40 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } 
// switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object catalog_ = ""; + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + public boolean hasCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + public java.lang.String getCatalog() { + java.lang.Object ref = catalog_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalog_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + public com.google.protobuf.ByteString + getCatalogBytes() { + java.lang.Object ref = catalog_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + catalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @param value The catalog to set. + * @return This builder for chaining. + */ + public Builder setCatalog( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + catalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return This builder for chaining. + */ + public Builder clearCatalog() { + catalog_ = getDefaultInstance().getCatalog(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the Catalog to search for the tables.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @param value The bytes for catalog to set. + * @return This builder for chaining. + */ + public Builder setCatalogBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + catalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object dbSchemaFilterPattern_ = ""; + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return Whether the dbSchemaFilterPattern field is set. + */ + public boolean hasDbSchemaFilterPattern() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The dbSchemaFilterPattern. + */ + public java.lang.String getDbSchemaFilterPattern() { + java.lang.Object ref = dbSchemaFilterPattern_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + dbSchemaFilterPattern_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return The bytes for dbSchemaFilterPattern. + */ + public com.google.protobuf.ByteString + getDbSchemaFilterPatternBytes() { + java.lang.Object ref = dbSchemaFilterPattern_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + dbSchemaFilterPattern_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @param value The dbSchemaFilterPattern to set. + * @return This builder for chaining. + */ + public Builder setDbSchemaFilterPattern( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + dbSchemaFilterPattern_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @return This builder for chaining. + */ + public Builder clearDbSchemaFilterPattern() { + dbSchemaFilterPattern_ = getDefaultInstance().getDbSchemaFilterPattern(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies a filter pattern for schemas to search for.
+       * When no db_schema_filter_pattern is provided, all schemas matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string db_schema_filter_pattern = 2; + * @param value The bytes for dbSchemaFilterPattern to set. + * @return This builder for chaining. + */ + public Builder setDbSchemaFilterPatternBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + dbSchemaFilterPattern_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private java.lang.Object tableNameFilterPattern_ = ""; + /** + *
+       *
+       * Specifies a filter pattern for tables to search for.
+       * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string table_name_filter_pattern = 3; + * @return Whether the tableNameFilterPattern field is set. + */ + public boolean hasTableNameFilterPattern() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + *
+       *
+       * Specifies a filter pattern for tables to search for.
+       * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string table_name_filter_pattern = 3; + * @return The tableNameFilterPattern. + */ + public java.lang.String getTableNameFilterPattern() { + java.lang.Object ref = tableNameFilterPattern_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + tableNameFilterPattern_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       *
+       * Specifies a filter pattern for tables to search for.
+       * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string table_name_filter_pattern = 3; + * @return The bytes for tableNameFilterPattern. + */ + public com.google.protobuf.ByteString + getTableNameFilterPatternBytes() { + java.lang.Object ref = tableNameFilterPattern_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + tableNameFilterPattern_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       *
+       * Specifies a filter pattern for tables to search for.
+       * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string table_name_filter_pattern = 3; + * @param value The tableNameFilterPattern to set. + * @return This builder for chaining. + */ + public Builder setTableNameFilterPattern( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + tableNameFilterPattern_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies a filter pattern for tables to search for.
+       * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string table_name_filter_pattern = 3; + * @return This builder for chaining. + */ + public Builder clearTableNameFilterPattern() { + tableNameFilterPattern_ = getDefaultInstance().getTableNameFilterPattern(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies a filter pattern for tables to search for.
+       * When no table_name_filter_pattern is provided, all tables matching other filters are searched.
+       * In the pattern string, two special characters can be used to denote matching rules:
+       *    - "%" means to match any substring with 0 or more characters.
+       *    - "_" means to match any one character.
+       * 
+ * + * optional string table_name_filter_pattern = 3; + * @param value The bytes for tableNameFilterPattern to set. + * @return This builder for chaining. + */ + public Builder setTableNameFilterPatternBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + tableNameFilterPattern_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringArrayList tableTypes_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + private void ensureTableTypesIsMutable() { + if (!tableTypes_.isModifiable()) { + tableTypes_ = new com.google.protobuf.LazyStringArrayList(tableTypes_); + } + bitField0_ |= 0x00000008; + } + /** + *
+       *
+       * Specifies a filter of table types which must match.
+       * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+       * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+       * 
+ * + * repeated string table_types = 4; + * @return A list containing the tableTypes. + */ + public com.google.protobuf.ProtocolStringList + getTableTypesList() { + tableTypes_.makeImmutable(); + return tableTypes_; + } + /** + *
+       *
+       * Specifies a filter of table types which must match.
+       * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+       * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+       * 
+ * + * repeated string table_types = 4; + * @return The count of tableTypes. + */ + public int getTableTypesCount() { + return tableTypes_.size(); + } + /** + *
+       *
+       * Specifies a filter of table types which must match.
+       * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+       * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+       * 
+ * + * repeated string table_types = 4; + * @param index The index of the element to return. + * @return The tableTypes at the given index. + */ + public java.lang.String getTableTypes(int index) { + return tableTypes_.get(index); + } + /** + *
+       *
+       * Specifies a filter of table types which must match.
+       * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+       * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+       * 
+ * + * repeated string table_types = 4; + * @param index The index of the value to return. + * @return The bytes of the tableTypes at the given index. + */ + public com.google.protobuf.ByteString + getTableTypesBytes(int index) { + return tableTypes_.getByteString(index); + } + /** + *
+       *
+       * Specifies a filter of table types which must match.
+       * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+       * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+       * 
+ * + * repeated string table_types = 4; + * @param index The index to set the value at. + * @param value The tableTypes to set. + * @return This builder for chaining. + */ + public Builder setTableTypes( + int index, java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + ensureTableTypesIsMutable(); + tableTypes_.set(index, value); + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies a filter of table types which must match.
+       * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+       * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+       * 
+ * + * repeated string table_types = 4; + * @param value The tableTypes to add. + * @return This builder for chaining. + */ + public Builder addTableTypes( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + ensureTableTypesIsMutable(); + tableTypes_.add(value); + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies a filter of table types which must match.
+       * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+       * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+       * 
+ * + * repeated string table_types = 4; + * @param values The tableTypes to add. + * @return This builder for chaining. + */ + public Builder addAllTableTypes( + java.lang.Iterable values) { + ensureTableTypesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, tableTypes_); + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies a filter of table types which must match.
+       * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+       * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+       * 
+ * + * repeated string table_types = 4; + * @return This builder for chaining. + */ + public Builder clearTableTypes() { + tableTypes_ = + com.google.protobuf.LazyStringArrayList.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008);; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies a filter of table types which must match.
+       * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+       * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+       * 
+ * + * repeated string table_types = 4; + * @param value The bytes of the tableTypes to add. + * @return This builder for chaining. + */ + public Builder addTableTypesBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + ensureTableTypesIsMutable(); + tableTypes_.add(value); + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + + private boolean includeSchema_ ; + /** + *
+       * Specifies if the Arrow schema should be returned for found tables.
+       * 
+ * + * bool include_schema = 5; + * @return The includeSchema. + */ + @java.lang.Override + public boolean getIncludeSchema() { + return includeSchema_; + } + /** + *
+       * Specifies if the Arrow schema should be returned for found tables.
+       * 
+ * + * bool include_schema = 5; + * @param value The includeSchema to set. + * @return This builder for chaining. + */ + public Builder setIncludeSchema(boolean value) { + + includeSchema_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + *
+       * Specifies if the Arrow schema should be returned for found tables.
+       * 
+ * + * bool include_schema = 5; + * @return This builder for chaining. + */ + public Builder clearIncludeSchema() { + bitField0_ = (bitField0_ & ~0x00000010); + includeSchema_ = false; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandGetTables) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandGetTables) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandGetTables parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + 
} + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTables getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandGetTableTypesOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandGetTableTypes) + com.google.protobuf.MessageOrBuilder { + } + /** + *
+   *
+   * Represents a request to retrieve the list of table types on a Flight SQL enabled backend.
+   * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+   * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+   * Used in the command member of FlightDescriptor for the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *  - GetFlightInfo: execute the catalog metadata request.
+   *
+   * The returned Arrow schema will be:
+   * <
+   *  table_type: utf8 not null
+   * >
+   * The returned data should be ordered by table_type.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetTableTypes} + */ + public static final class CommandGetTableTypes extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandGetTableTypes) + CommandGetTableTypesOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandGetTableTypes.newBuilder() to construct. + private CommandGetTableTypes(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandGetTableTypes() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandGetTableTypes(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetTableTypes_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetTableTypes_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes.Builder.class); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + 
size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes) obj; + + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a request to retrieve the list of table types on a Flight SQL enabled backend.
+     * The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables.
+     * TABLE, VIEW, and SYSTEM TABLE are commonly supported.
+     * Used in the command member of FlightDescriptor for the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *  - GetFlightInfo: execute the catalog metadata request.
+     *
+     * The returned Arrow schema will be:
+     * <
+     *  table_type: utf8 not null
+     * >
+     * The returned data should be ordered by table_type.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetTableTypes} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandGetTableTypes) + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypesOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetTableTypes_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetTableTypes_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetTableTypes_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes result = 
buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes(this); + onBuilt(); + return result; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet 
unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandGetTableTypes) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandGetTableTypes) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandGetTableTypes parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetTableTypes 
getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandGetPrimaryKeysOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandGetPrimaryKeys) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * Specifies the catalog to search for the table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + boolean hasCatalog(); + /** + *
+     *
+     * Specifies the catalog to search for the table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + java.lang.String getCatalog(); + /** + *
+     *
+     * Specifies the catalog to search for the table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + com.google.protobuf.ByteString + getCatalogBytes(); + + /** + *
+     *
+     * Specifies the schema to search for the table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return Whether the dbSchema field is set. + */ + boolean hasDbSchema(); + /** + *
+     *
+     * Specifies the schema to search for the table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The dbSchema. + */ + java.lang.String getDbSchema(); + /** + *
+     *
+     * Specifies the schema to search for the table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The bytes for dbSchema. + */ + com.google.protobuf.ByteString + getDbSchemaBytes(); + + /** + *
+     * Specifies the table to get the primary keys for.
+     * 
+ * + * string table = 3; + * @return The table. + */ + java.lang.String getTable(); + /** + *
+     * Specifies the table to get the primary keys for.
+     * 
+ * + * string table = 3; + * @return The bytes for table. + */ + com.google.protobuf.ByteString + getTableBytes(); + } + /** + *
+   *
+   * Represents a request to retrieve the primary keys of a table on a Flight SQL enabled backend.
+   * Used in the command member of FlightDescriptor for the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *  - GetFlightInfo: execute the catalog metadata request.
+   *
+   * The returned Arrow schema will be:
+   * <
+   *  catalog_name: utf8,
+   *  db_schema_name: utf8,
+   *  table_name: utf8 not null,
+   *  column_name: utf8 not null,
+   *  key_name: utf8,
+   *  key_sequence: int32 not null
+   * >
+   * The returned data should be ordered by catalog_name, db_schema_name, table_name, key_name, then key_sequence.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetPrimaryKeys} + */ + public static final class CommandGetPrimaryKeys extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandGetPrimaryKeys) + CommandGetPrimaryKeysOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandGetPrimaryKeys.newBuilder() to construct. + private CommandGetPrimaryKeys(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandGetPrimaryKeys() { + catalog_ = ""; + dbSchema_ = ""; + table_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandGetPrimaryKeys(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetPrimaryKeys_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetPrimaryKeys_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys.Builder.class); + } + + private int bitField0_; + public static final int CATALOG_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object catalog_ = ""; + /** + *
+     *
+     * Specifies the catalog to search for the table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + @java.lang.Override + public boolean hasCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     *
+     * Specifies the catalog to search for the table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + @java.lang.Override + public java.lang.String getCatalog() { + java.lang.Object ref = catalog_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalog_ = s; + return s; + } + } + /** + *
+     *
+     * Specifies the catalog to search for the table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getCatalogBytes() { + java.lang.Object ref = catalog_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + catalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DB_SCHEMA_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object dbSchema_ = ""; + /** + *
+     *
+     * Specifies the schema to search for the table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return Whether the dbSchema field is set. + */ + @java.lang.Override + public boolean hasDbSchema() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+     *
+     * Specifies the schema to search for the table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The dbSchema. + */ + @java.lang.Override + public java.lang.String getDbSchema() { + java.lang.Object ref = dbSchema_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + dbSchema_ = s; + return s; + } + } + /** + *
+     *
+     * Specifies the schema to search for the table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The bytes for dbSchema. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getDbSchemaBytes() { + java.lang.Object ref = dbSchema_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + dbSchema_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TABLE_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object table_ = ""; + /** + *
+     * Specifies the table to get the primary keys for.
+     * 
+ * + * string table = 3; + * @return The table. + */ + @java.lang.Override + public java.lang.String getTable() { + java.lang.Object ref = table_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + table_ = s; + return s; + } + } + /** + *
+     * Specifies the table to get the primary keys for.
+     * 
+ * + * string table = 3; + * @return The bytes for table. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getTableBytes() { + java.lang.Object ref = table_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + table_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, catalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, dbSchema_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(table_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, table_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, catalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, dbSchema_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(table_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, table_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + 
return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys) obj; + + if (hasCatalog() != other.hasCatalog()) return false; + if (hasCatalog()) { + if (!getCatalog() + .equals(other.getCatalog())) return false; + } + if (hasDbSchema() != other.hasDbSchema()) return false; + if (hasDbSchema()) { + if (!getDbSchema() + .equals(other.getDbSchema())) return false; + } + if (!getTable() + .equals(other.getTable())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasCatalog()) { + hash = (37 * hash) + CATALOG_FIELD_NUMBER; + hash = (53 * hash) + getCatalog().hashCode(); + } + if (hasDbSchema()) { + hash = (37 * hash) + DB_SCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getDbSchema().hashCode(); + } + hash = (37 * hash) + TABLE_FIELD_NUMBER; + hash = (53 * hash) + getTable().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a request to retrieve the primary keys of a table on a Flight SQL enabled backend.
+     * Used in the command member of FlightDescriptor for the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *  - GetFlightInfo: execute the catalog metadata request.
+     *
+     * The returned Arrow schema will be:
+     * <
+     *  catalog_name: utf8,
+     *  db_schema_name: utf8,
+     *  table_name: utf8 not null,
+     *  column_name: utf8 not null,
+     *  key_name: utf8,
+     *  key_sequence: int32 not null
+     * >
+     * The returned data should be ordered by catalog_name, db_schema_name, table_name, key_name, then key_sequence.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetPrimaryKeys} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandGetPrimaryKeys) + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeysOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetPrimaryKeys_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetPrimaryKeys_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + catalog_ = ""; + dbSchema_ = ""; + table_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetPrimaryKeys_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys build() 
{ + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.catalog_ = catalog_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.dbSchema_ = dbSchema_; + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.table_ = table_; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys.getDefaultInstance()) return this; + if (other.hasCatalog()) { + catalog_ = other.catalog_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasDbSchema()) { + dbSchema_ = other.dbSchema_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (!other.getTable().isEmpty()) { + table_ = other.table_; + bitField0_ |= 0x00000004; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + 
onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + catalog_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + dbSchema_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + table_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object catalog_ = ""; + /** + *
+       *
+       * Specifies the catalog to search for the table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + public boolean hasCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       *
+       * Specifies the catalog to search for the table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + public java.lang.String getCatalog() { + java.lang.Object ref = catalog_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalog_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       *
+       * Specifies the catalog to search for the table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + public com.google.protobuf.ByteString + getCatalogBytes() { + java.lang.Object ref = catalog_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + catalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       *
+       * Specifies the catalog to search for the table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @param value The catalog to set. + * @return This builder for chaining. + */ + public Builder setCatalog( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + catalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the catalog to search for the table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return This builder for chaining. + */ + public Builder clearCatalog() { + catalog_ = getDefaultInstance().getCatalog(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the catalog to search for the table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @param value The bytes for catalog to set. + * @return This builder for chaining. + */ + public Builder setCatalogBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + catalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object dbSchema_ = ""; + /** + *
+       *
+       * Specifies the schema to search for the table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return Whether the dbSchema field is set. + */ + public boolean hasDbSchema() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       *
+       * Specifies the schema to search for the table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return The dbSchema. + */ + public java.lang.String getDbSchema() { + java.lang.Object ref = dbSchema_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + dbSchema_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       *
+       * Specifies the schema to search for the table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return The bytes for dbSchema. + */ + public com.google.protobuf.ByteString + getDbSchemaBytes() { + java.lang.Object ref = dbSchema_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + dbSchema_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       *
+       * Specifies the schema to search for the table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @param value The dbSchema to set. + * @return This builder for chaining. + */ + public Builder setDbSchema( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + dbSchema_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the schema to search for the table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return This builder for chaining. + */ + public Builder clearDbSchema() { + dbSchema_ = getDefaultInstance().getDbSchema(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the schema to search for the table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @param value The bytes for dbSchema to set. + * @return This builder for chaining. + */ + public Builder setDbSchemaBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + dbSchema_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private java.lang.Object table_ = ""; + /** + *
+       * Specifies the table to get the primary keys for.
+       * 
+ * + * string table = 3; + * @return The table. + */ + public java.lang.String getTable() { + java.lang.Object ref = table_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + table_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Specifies the table to get the primary keys for.
+       * 
+ * + * string table = 3; + * @return The bytes for table. + */ + public com.google.protobuf.ByteString + getTableBytes() { + java.lang.Object ref = table_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + table_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Specifies the table to get the primary keys for.
+       * 
+ * + * string table = 3; + * @param value The table to set. + * @return This builder for chaining. + */ + public Builder setTable( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + table_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * Specifies the table to get the primary keys for.
+       * 
+ * + * string table = 3; + * @return This builder for chaining. + */ + public Builder clearTable() { + table_ = getDefaultInstance().getTable(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       * Specifies the table to get the primary keys for.
+       * 
+ * + * string table = 3; + * @param value The bytes for table to set. + * @return This builder for chaining. + */ + public Builder setTableBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + table_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandGetPrimaryKeys) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandGetPrimaryKeys) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandGetPrimaryKeys parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch 
(java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetPrimaryKeys getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandGetExportedKeysOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandGetExportedKeys) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * Specifies the catalog to search for the foreign key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + boolean hasCatalog(); + /** + *
+     *
+     * Specifies the catalog to search for the foreign key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + java.lang.String getCatalog(); + /** + *
+     *
+     * Specifies the catalog to search for the foreign key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + com.google.protobuf.ByteString + getCatalogBytes(); + + /** + *
+     *
+     * Specifies the schema to search for the foreign key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return Whether the dbSchema field is set. + */ + boolean hasDbSchema(); + /** + *
+     *
+     * Specifies the schema to search for the foreign key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The dbSchema. + */ + java.lang.String getDbSchema(); + /** + *
+     *
+     * Specifies the schema to search for the foreign key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The bytes for dbSchema. + */ + com.google.protobuf.ByteString + getDbSchemaBytes(); + + /** + *
+     * Specifies the foreign key table to get the foreign keys for.
+     * 
+ * + * string table = 3; + * @return The table. + */ + java.lang.String getTable(); + /** + *
+     * Specifies the foreign key table to get the foreign keys for.
+     * 
+ * + * string table = 3; + * @return The bytes for table. + */ + com.google.protobuf.ByteString + getTableBytes(); + } + /** + *
+   *
+   * Represents a request to retrieve a description of the foreign key columns that reference the given table's
+   * primary key columns (the foreign keys exported by a table) of a table on a Flight SQL enabled backend.
+   * Used in the command member of FlightDescriptor for the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *  - GetFlightInfo: execute the catalog metadata request.
+   *
+   * The returned Arrow schema will be:
+   * <
+   *  pk_catalog_name: utf8,
+   *  pk_db_schema_name: utf8,
+   *  pk_table_name: utf8 not null,
+   *  pk_column_name: utf8 not null,
+   *  fk_catalog_name: utf8,
+   *  fk_db_schema_name: utf8,
+   *  fk_table_name: utf8 not null,
+   *  fk_column_name: utf8 not null,
+   *  key_sequence: int32 not null,
+   *  fk_key_name: utf8,
+   *  pk_key_name: utf8,
+   *  update_rule: uint8 not null,
+   *  delete_rule: uint8 not null
+   * >
+   * The returned data should be ordered by fk_catalog_name, fk_db_schema_name, fk_table_name, fk_key_name, then key_sequence.
+   * update_rule and delete_rule returns a byte that is equivalent to actions declared on UpdateDeleteRules enum.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetExportedKeys} + */ + public static final class CommandGetExportedKeys extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandGetExportedKeys) + CommandGetExportedKeysOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandGetExportedKeys.newBuilder() to construct. + private CommandGetExportedKeys(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandGetExportedKeys() { + catalog_ = ""; + dbSchema_ = ""; + table_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandGetExportedKeys(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetExportedKeys_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetExportedKeys_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys.Builder.class); + } + + private int bitField0_; + public static final int CATALOG_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object catalog_ = ""; + /** + *
+     *
+     * Specifies the catalog to search for the foreign key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + @java.lang.Override + public boolean hasCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     *
+     * Specifies the catalog to search for the foreign key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + @java.lang.Override + public java.lang.String getCatalog() { + java.lang.Object ref = catalog_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalog_ = s; + return s; + } + } + /** + *
+     *
+     * Specifies the catalog to search for the foreign key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getCatalogBytes() { + java.lang.Object ref = catalog_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + catalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DB_SCHEMA_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object dbSchema_ = ""; + /** + *
+     *
+     * Specifies the schema to search for the foreign key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return Whether the dbSchema field is set. + */ + @java.lang.Override + public boolean hasDbSchema() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+     *
+     * Specifies the schema to search for the foreign key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The dbSchema. + */ + @java.lang.Override + public java.lang.String getDbSchema() { + java.lang.Object ref = dbSchema_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + dbSchema_ = s; + return s; + } + } + /** + *
+     *
+     * Specifies the schema to search for the foreign key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The bytes for dbSchema. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getDbSchemaBytes() { + java.lang.Object ref = dbSchema_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + dbSchema_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TABLE_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object table_ = ""; + /** + *
+     * Specifies the foreign key table to get the foreign keys for.
+     * 
+ * + * string table = 3; + * @return The table. + */ + @java.lang.Override + public java.lang.String getTable() { + java.lang.Object ref = table_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + table_ = s; + return s; + } + } + /** + *
+     * Specifies the foreign key table to get the foreign keys for.
+     * 
+ * + * string table = 3; + * @return The bytes for table. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getTableBytes() { + java.lang.Object ref = table_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + table_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, catalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, dbSchema_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(table_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, table_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, catalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, dbSchema_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(table_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, table_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + 
return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys) obj; + + if (hasCatalog() != other.hasCatalog()) return false; + if (hasCatalog()) { + if (!getCatalog() + .equals(other.getCatalog())) return false; + } + if (hasDbSchema() != other.hasDbSchema()) return false; + if (hasDbSchema()) { + if (!getDbSchema() + .equals(other.getDbSchema())) return false; + } + if (!getTable() + .equals(other.getTable())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasCatalog()) { + hash = (37 * hash) + CATALOG_FIELD_NUMBER; + hash = (53 * hash) + getCatalog().hashCode(); + } + if (hasDbSchema()) { + hash = (37 * hash) + DB_SCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getDbSchema().hashCode(); + } + hash = (37 * hash) + TABLE_FIELD_NUMBER; + hash = (53 * hash) + getTable().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { 
+ return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a request to retrieve a description of the foreign key columns that reference the given table's
+     * primary key columns (the foreign keys exported by a table) of a table on a Flight SQL enabled backend.
+     * Used in the command member of FlightDescriptor for the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *  - GetFlightInfo: execute the catalog metadata request.
+     *
+     * The returned Arrow schema will be:
+     * <
+     *  pk_catalog_name: utf8,
+     *  pk_db_schema_name: utf8,
+     *  pk_table_name: utf8 not null,
+     *  pk_column_name: utf8 not null,
+     *  fk_catalog_name: utf8,
+     *  fk_db_schema_name: utf8,
+     *  fk_table_name: utf8 not null,
+     *  fk_column_name: utf8 not null,
+     *  key_sequence: int32 not null,
+     *  fk_key_name: utf8,
+     *  pk_key_name: utf8,
+     *  update_rule: uint8 not null,
+     *  delete_rule: uint8 not null
+     * >
+     * The returned data should be ordered by fk_catalog_name, fk_db_schema_name, fk_table_name, fk_key_name, then key_sequence.
+     * update_rule and delete_rule returns a byte that is equivalent to actions declared on UpdateDeleteRules enum.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetExportedKeys} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandGetExportedKeys) + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeysOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetExportedKeys_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetExportedKeys_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + catalog_ = ""; + dbSchema_ = ""; + table_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetExportedKeys_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.catalog_ = catalog_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.dbSchema_ = dbSchema_; + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.table_ = table_; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys.getDefaultInstance()) return this; + if (other.hasCatalog()) { + catalog_ = other.catalog_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasDbSchema()) { + dbSchema_ = other.dbSchema_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (!other.getTable().isEmpty()) { + table_ = other.table_; + bitField0_ |= 0x00000004; 
+ onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + catalog_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + dbSchema_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + table_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object catalog_ = ""; + /** + *
+       *
+       * Specifies the catalog to search for the foreign key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + public boolean hasCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       *
+       * Specifies the catalog to search for the foreign key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + public java.lang.String getCatalog() { + java.lang.Object ref = catalog_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalog_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       *
+       * Specifies the catalog to search for the foreign key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + public com.google.protobuf.ByteString + getCatalogBytes() { + java.lang.Object ref = catalog_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + catalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       *
+       * Specifies the catalog to search for the foreign key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @param value The catalog to set. + * @return This builder for chaining. + */ + public Builder setCatalog( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + catalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the catalog to search for the foreign key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return This builder for chaining. + */ + public Builder clearCatalog() { + catalog_ = getDefaultInstance().getCatalog(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the catalog to search for the foreign key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @param value The bytes for catalog to set. + * @return This builder for chaining. + */ + public Builder setCatalogBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + catalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object dbSchema_ = ""; + /** + *
+       *
+       * Specifies the schema to search for the foreign key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return Whether the dbSchema field is set. + */ + public boolean hasDbSchema() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       *
+       * Specifies the schema to search for the foreign key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return The dbSchema. + */ + public java.lang.String getDbSchema() { + java.lang.Object ref = dbSchema_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + dbSchema_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       *
+       * Specifies the schema to search for the foreign key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return The bytes for dbSchema. + */ + public com.google.protobuf.ByteString + getDbSchemaBytes() { + java.lang.Object ref = dbSchema_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + dbSchema_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       *
+       * Specifies the schema to search for the foreign key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @param value The dbSchema to set. + * @return This builder for chaining. + */ + public Builder setDbSchema( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + dbSchema_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the schema to search for the foreign key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return This builder for chaining. + */ + public Builder clearDbSchema() { + dbSchema_ = getDefaultInstance().getDbSchema(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the schema to search for the foreign key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @param value The bytes for dbSchema to set. + * @return This builder for chaining. + */ + public Builder setDbSchemaBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + dbSchema_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private java.lang.Object table_ = ""; + /** + *
+       * Specifies the foreign key table to get the foreign keys for.
+       * 
+ * + * string table = 3; + * @return The table. + */ + public java.lang.String getTable() { + java.lang.Object ref = table_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + table_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Specifies the foreign key table to get the foreign keys for.
+       * 
+ * + * string table = 3; + * @return The bytes for table. + */ + public com.google.protobuf.ByteString + getTableBytes() { + java.lang.Object ref = table_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + table_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Specifies the foreign key table to get the foreign keys for.
+       * 
+ * + * string table = 3; + * @param value The table to set. + * @return This builder for chaining. + */ + public Builder setTable( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + table_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * Specifies the foreign key table to get the foreign keys for.
+       * 
+ * + * string table = 3; + * @return This builder for chaining. + */ + public Builder clearTable() { + table_ = getDefaultInstance().getTable(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       * Specifies the foreign key table to get the foreign keys for.
+       * 
+ * + * string table = 3; + * @param value The bytes for table to set. + * @return This builder for chaining. + */ + public Builder setTableBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + table_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandGetExportedKeys) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandGetExportedKeys) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandGetExportedKeys parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch 
(java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetExportedKeys getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandGetImportedKeysOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandGetImportedKeys) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     *
+     * Specifies the catalog to search for the primary key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + boolean hasCatalog(); + /** + *
+     *
+     * Specifies the catalog to search for the primary key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + java.lang.String getCatalog(); + /** + *
+     *
+     * Specifies the catalog to search for the primary key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + com.google.protobuf.ByteString + getCatalogBytes(); + + /** + *
+     *
+     * Specifies the schema to search for the primary key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return Whether the dbSchema field is set. + */ + boolean hasDbSchema(); + /** + *
+     *
+     * Specifies the schema to search for the primary key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The dbSchema. + */ + java.lang.String getDbSchema(); + /** + *
+     *
+     * Specifies the schema to search for the primary key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The bytes for dbSchema. + */ + com.google.protobuf.ByteString + getDbSchemaBytes(); + + /** + *
+     * Specifies the primary key table to get the foreign keys for.
+     * 
+ * + * string table = 3; + * @return The table. + */ + java.lang.String getTable(); + /** + *
+     * Specifies the primary key table to get the foreign keys for.
+     * 
+ * + * string table = 3; + * @return The bytes for table. + */ + com.google.protobuf.ByteString + getTableBytes(); + } + /** + *
+   *
+   * Represents a request to retrieve the foreign keys of a table on a Flight SQL enabled backend.
+   * Used in the command member of FlightDescriptor for the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *  - GetFlightInfo: execute the catalog metadata request.
+   *
+   * The returned Arrow schema will be:
+   * <
+   *  pk_catalog_name: utf8,
+   *  pk_db_schema_name: utf8,
+   *  pk_table_name: utf8 not null,
+   *  pk_column_name: utf8 not null,
+   *  fk_catalog_name: utf8,
+   *  fk_db_schema_name: utf8,
+   *  fk_table_name: utf8 not null,
+   *  fk_column_name: utf8 not null,
+   *  key_sequence: int32 not null,
+   *  fk_key_name: utf8,
+   *  pk_key_name: utf8,
+   *  update_rule: uint8 not null,
+   *  delete_rule: uint8 not null
+   * >
+   * The returned data should be ordered by pk_catalog_name, pk_db_schema_name, pk_table_name, pk_key_name, then key_sequence.
+   * update_rule and delete_rule returns a byte that is equivalent to actions:
+   *    - 0 = CASCADE
+   *    - 1 = RESTRICT
+   *    - 2 = SET NULL
+   *    - 3 = NO ACTION
+   *    - 4 = SET DEFAULT
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetImportedKeys} + */ + public static final class CommandGetImportedKeys extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandGetImportedKeys) + CommandGetImportedKeysOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandGetImportedKeys.newBuilder() to construct. + private CommandGetImportedKeys(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandGetImportedKeys() { + catalog_ = ""; + dbSchema_ = ""; + table_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandGetImportedKeys(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetImportedKeys_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetImportedKeys_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys.Builder.class); + } + + private int bitField0_; + public static final int CATALOG_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object catalog_ = ""; + /** + *
+     *
+     * Specifies the catalog to search for the primary key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + @java.lang.Override + public boolean hasCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     *
+     * Specifies the catalog to search for the primary key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + @java.lang.Override + public java.lang.String getCatalog() { + java.lang.Object ref = catalog_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalog_ = s; + return s; + } + } + /** + *
+     *
+     * Specifies the catalog to search for the primary key table.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getCatalogBytes() { + java.lang.Object ref = catalog_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + catalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DB_SCHEMA_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object dbSchema_ = ""; + /** + *
+     *
+     * Specifies the schema to search for the primary key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return Whether the dbSchema field is set. + */ + @java.lang.Override + public boolean hasDbSchema() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+     *
+     * Specifies the schema to search for the primary key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The dbSchema. + */ + @java.lang.Override + public java.lang.String getDbSchema() { + java.lang.Object ref = dbSchema_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + dbSchema_ = s; + return s; + } + } + /** + *
+     *
+     * Specifies the schema to search for the primary key table.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string db_schema = 2; + * @return The bytes for dbSchema. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getDbSchemaBytes() { + java.lang.Object ref = dbSchema_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + dbSchema_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TABLE_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object table_ = ""; + /** + *
+     * Specifies the primary key table to get the foreign keys for.
+     * 
+ * + * string table = 3; + * @return The table. + */ + @java.lang.Override + public java.lang.String getTable() { + java.lang.Object ref = table_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + table_ = s; + return s; + } + } + /** + *
+     * Specifies the primary key table to get the foreign keys for.
+     * 
+ * + * string table = 3; + * @return The bytes for table. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getTableBytes() { + java.lang.Object ref = table_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + table_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, catalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, dbSchema_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(table_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, table_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, catalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, dbSchema_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(table_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, table_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + 
return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys) obj; + + if (hasCatalog() != other.hasCatalog()) return false; + if (hasCatalog()) { + if (!getCatalog() + .equals(other.getCatalog())) return false; + } + if (hasDbSchema() != other.hasDbSchema()) return false; + if (hasDbSchema()) { + if (!getDbSchema() + .equals(other.getDbSchema())) return false; + } + if (!getTable() + .equals(other.getTable())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasCatalog()) { + hash = (37 * hash) + CATALOG_FIELD_NUMBER; + hash = (53 * hash) + getCatalog().hashCode(); + } + if (hasDbSchema()) { + hash = (37 * hash) + DB_SCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getDbSchema().hashCode(); + } + hash = (37 * hash) + TABLE_FIELD_NUMBER; + hash = (53 * hash) + getTable().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { 
+ return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a request to retrieve the foreign keys of a table on a Flight SQL enabled backend.
+     * Used in the command member of FlightDescriptor for the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *  - GetFlightInfo: execute the catalog metadata request.
+     *
+     * The returned Arrow schema will be:
+     * <
+     *  pk_catalog_name: utf8,
+     *  pk_db_schema_name: utf8,
+     *  pk_table_name: utf8 not null,
+     *  pk_column_name: utf8 not null,
+     *  fk_catalog_name: utf8,
+     *  fk_db_schema_name: utf8,
+     *  fk_table_name: utf8 not null,
+     *  fk_column_name: utf8 not null,
+     *  key_sequence: int32 not null,
+     *  fk_key_name: utf8,
+     *  pk_key_name: utf8,
+     *  update_rule: uint8 not null,
+     *  delete_rule: uint8 not null
+     * >
+     * The returned data should be ordered by pk_catalog_name, pk_db_schema_name, pk_table_name, pk_key_name, then key_sequence.
+     * update_rule and delete_rule returns a byte that is equivalent to actions:
+     *    - 0 = CASCADE
+     *    - 1 = RESTRICT
+     *    - 2 = SET NULL
+     *    - 3 = NO ACTION
+     *    - 4 = SET DEFAULT
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetImportedKeys} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandGetImportedKeys) + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeysOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetImportedKeys_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetImportedKeys_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + catalog_ = ""; + dbSchema_ = ""; + table_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetImportedKeys_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.catalog_ = catalog_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.dbSchema_ = dbSchema_; + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.table_ = table_; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys.getDefaultInstance()) return this; + if (other.hasCatalog()) { + catalog_ = other.catalog_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasDbSchema()) { + dbSchema_ = other.dbSchema_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (!other.getTable().isEmpty()) { + table_ = other.table_; + bitField0_ |= 0x00000004; 
+ onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + catalog_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + dbSchema_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + table_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object catalog_ = ""; + /** + *
+       *
+       * Specifies the catalog to search for the primary key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return Whether the catalog field is set. + */ + public boolean hasCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       *
+       * Specifies the catalog to search for the primary key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return The catalog. + */ + public java.lang.String getCatalog() { + java.lang.Object ref = catalog_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + catalog_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       *
+       * Specifies the catalog to search for the primary key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return The bytes for catalog. + */ + public com.google.protobuf.ByteString + getCatalogBytes() { + java.lang.Object ref = catalog_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + catalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       *
+       * Specifies the catalog to search for the primary key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @param value The catalog to set. + * @return This builder for chaining. + */ + public Builder setCatalog( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + catalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the catalog to search for the primary key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @return This builder for chaining. + */ + public Builder clearCatalog() { + catalog_ = getDefaultInstance().getCatalog(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the catalog to search for the primary key table.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string catalog = 1; + * @param value The bytes for catalog to set. + * @return This builder for chaining. + */ + public Builder setCatalogBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + catalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object dbSchema_ = ""; + /** + *
+       *
+       * Specifies the schema to search for the primary key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return Whether the dbSchema field is set. + */ + public boolean hasDbSchema() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       *
+       * Specifies the schema to search for the primary key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return The dbSchema. + */ + public java.lang.String getDbSchema() { + java.lang.Object ref = dbSchema_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + dbSchema_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       *
+       * Specifies the schema to search for the primary key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return The bytes for dbSchema. + */ + public com.google.protobuf.ByteString + getDbSchemaBytes() { + java.lang.Object ref = dbSchema_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + dbSchema_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       *
+       * Specifies the schema to search for the primary key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @param value The dbSchema to set. + * @return This builder for chaining. + */ + public Builder setDbSchema( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + dbSchema_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the schema to search for the primary key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @return This builder for chaining. + */ + public Builder clearDbSchema() { + dbSchema_ = getDefaultInstance().getDbSchema(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       *
+       * Specifies the schema to search for the primary key table.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string db_schema = 2; + * @param value The bytes for dbSchema to set. + * @return This builder for chaining. + */ + public Builder setDbSchemaBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + dbSchema_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private java.lang.Object table_ = ""; + /** + *
+       * Specifies the primary key table to get the foreign keys for.
+       * 
+ * + * string table = 3; + * @return The table. + */ + public java.lang.String getTable() { + java.lang.Object ref = table_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + table_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Specifies the primary key table to get the foreign keys for.
+       * 
+ * + * string table = 3; + * @return The bytes for table. + */ + public com.google.protobuf.ByteString + getTableBytes() { + java.lang.Object ref = table_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + table_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Specifies the primary key table to get the foreign keys for.
+       * 
+ * + * string table = 3; + * @param value The table to set. + * @return This builder for chaining. + */ + public Builder setTable( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + table_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * Specifies the primary key table to get the foreign keys for.
+       * 
+ * + * string table = 3; + * @return This builder for chaining. + */ + public Builder clearTable() { + table_ = getDefaultInstance().getTable(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       * Specifies the primary key table to get the foreign keys for.
+       * 
+ * + * string table = 3; + * @param value The bytes for table to set. + * @return This builder for chaining. + */ + public Builder setTableBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + table_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandGetImportedKeys) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandGetImportedKeys) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandGetImportedKeys parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch 
(java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetImportedKeys getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandGetCrossReferenceOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandGetCrossReference) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     **
+     * The catalog name where the parent table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string pk_catalog = 1; + * @return Whether the pkCatalog field is set. + */ + boolean hasPkCatalog(); + /** + *
+     **
+     * The catalog name where the parent table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string pk_catalog = 1; + * @return The pkCatalog. + */ + java.lang.String getPkCatalog(); + /** + *
+     **
+     * The catalog name where the parent table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string pk_catalog = 1; + * @return The bytes for pkCatalog. + */ + com.google.protobuf.ByteString + getPkCatalogBytes(); + + /** + *
+     **
+     * The Schema name where the parent table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string pk_db_schema = 2; + * @return Whether the pkDbSchema field is set. + */ + boolean hasPkDbSchema(); + /** + *
+     **
+     * The Schema name where the parent table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string pk_db_schema = 2; + * @return The pkDbSchema. + */ + java.lang.String getPkDbSchema(); + /** + *
+     **
+     * The Schema name where the parent table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string pk_db_schema = 2; + * @return The bytes for pkDbSchema. + */ + com.google.protobuf.ByteString + getPkDbSchemaBytes(); + + /** + *
+     **
+     * The parent table name. It cannot be null.
+     * 
+ * + * string pk_table = 3; + * @return The pkTable. + */ + java.lang.String getPkTable(); + /** + *
+     **
+     * The parent table name. It cannot be null.
+     * 
+ * + * string pk_table = 3; + * @return The bytes for pkTable. + */ + com.google.protobuf.ByteString + getPkTableBytes(); + + /** + *
+     **
+     * The catalog name where the foreign table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string fk_catalog = 4; + * @return Whether the fkCatalog field is set. + */ + boolean hasFkCatalog(); + /** + *
+     **
+     * The catalog name where the foreign table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string fk_catalog = 4; + * @return The fkCatalog. + */ + java.lang.String getFkCatalog(); + /** + *
+     **
+     * The catalog name where the foreign table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string fk_catalog = 4; + * @return The bytes for fkCatalog. + */ + com.google.protobuf.ByteString + getFkCatalogBytes(); + + /** + *
+     **
+     * The schema name where the foreign table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string fk_db_schema = 5; + * @return Whether the fkDbSchema field is set. + */ + boolean hasFkDbSchema(); + /** + *
+     **
+     * The schema name where the foreign table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string fk_db_schema = 5; + * @return The fkDbSchema. + */ + java.lang.String getFkDbSchema(); + /** + *
+     **
+     * The schema name where the foreign table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string fk_db_schema = 5; + * @return The bytes for fkDbSchema. + */ + com.google.protobuf.ByteString + getFkDbSchemaBytes(); + + /** + *
+     **
+     * The foreign table name. It cannot be null.
+     * 
+ * + * string fk_table = 6; + * @return The fkTable. + */ + java.lang.String getFkTable(); + /** + *
+     **
+     * The foreign table name. It cannot be null.
+     * 
+ * + * string fk_table = 6; + * @return The bytes for fkTable. + */ + com.google.protobuf.ByteString + getFkTableBytes(); + } + /** + *
+   *
+   * Represents a request to retrieve a description of the foreign key columns in the given foreign key table that
+   * reference the primary key or the columns representing a unique constraint of the parent table (could be the same
+   * or a different table) on a Flight SQL enabled backend.
+   * Used in the command member of FlightDescriptor for the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *  - GetFlightInfo: execute the catalog metadata request.
+   *
+   * The returned Arrow schema will be:
+   * <
+   *  pk_catalog_name: utf8,
+   *  pk_db_schema_name: utf8,
+   *  pk_table_name: utf8 not null,
+   *  pk_column_name: utf8 not null,
+   *  fk_catalog_name: utf8,
+   *  fk_db_schema_name: utf8,
+   *  fk_table_name: utf8 not null,
+   *  fk_column_name: utf8 not null,
+   *  key_sequence: int32 not null,
+   *  fk_key_name: utf8,
+   *  pk_key_name: utf8,
+   *  update_rule: uint8 not null,
+   *  delete_rule: uint8 not null
+   * >
+   * The returned data should be ordered by pk_catalog_name, pk_db_schema_name, pk_table_name, pk_key_name, then key_sequence.
+   * update_rule and delete_rule returns a byte that is equivalent to actions:
+   *    - 0 = CASCADE
+   *    - 1 = RESTRICT
+   *    - 2 = SET NULL
+   *    - 3 = NO ACTION
+   *    - 4 = SET DEFAULT
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetCrossReference} + */ + public static final class CommandGetCrossReference extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandGetCrossReference) + CommandGetCrossReferenceOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandGetCrossReference.newBuilder() to construct. + private CommandGetCrossReference(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandGetCrossReference() { + pkCatalog_ = ""; + pkDbSchema_ = ""; + pkTable_ = ""; + fkCatalog_ = ""; + fkDbSchema_ = ""; + fkTable_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandGetCrossReference(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetCrossReference_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetCrossReference_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference.Builder.class); + } + + private int bitField0_; + public static final int PK_CATALOG_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object pkCatalog_ = ""; + /** + *
+     **
+     * The catalog name where the parent table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string pk_catalog = 1; + * @return Whether the pkCatalog field is set. + */ + @java.lang.Override + public boolean hasPkCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     **
+     * The catalog name where the parent table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string pk_catalog = 1; + * @return The pkCatalog. + */ + @java.lang.Override + public java.lang.String getPkCatalog() { + java.lang.Object ref = pkCatalog_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pkCatalog_ = s; + return s; + } + } + /** + *
+     **
+     * The catalog name where the parent table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string pk_catalog = 1; + * @return The bytes for pkCatalog. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getPkCatalogBytes() { + java.lang.Object ref = pkCatalog_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pkCatalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PK_DB_SCHEMA_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object pkDbSchema_ = ""; + /** + *
+     **
+     * The Schema name where the parent table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string pk_db_schema = 2; + * @return Whether the pkDbSchema field is set. + */ + @java.lang.Override + public boolean hasPkDbSchema() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+     **
+     * The Schema name where the parent table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string pk_db_schema = 2; + * @return The pkDbSchema. + */ + @java.lang.Override + public java.lang.String getPkDbSchema() { + java.lang.Object ref = pkDbSchema_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pkDbSchema_ = s; + return s; + } + } + /** + *
+     **
+     * The Schema name where the parent table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string pk_db_schema = 2; + * @return The bytes for pkDbSchema. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getPkDbSchemaBytes() { + java.lang.Object ref = pkDbSchema_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pkDbSchema_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PK_TABLE_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private volatile java.lang.Object pkTable_ = ""; + /** + *
+     **
+     * The parent table name. It cannot be null.
+     * 
+ * + * string pk_table = 3; + * @return The pkTable. + */ + @java.lang.Override + public java.lang.String getPkTable() { + java.lang.Object ref = pkTable_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pkTable_ = s; + return s; + } + } + /** + *
+     **
+     * The parent table name. It cannot be null.
+     * 
+ * + * string pk_table = 3; + * @return The bytes for pkTable. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getPkTableBytes() { + java.lang.Object ref = pkTable_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pkTable_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int FK_CATALOG_FIELD_NUMBER = 4; + @SuppressWarnings("serial") + private volatile java.lang.Object fkCatalog_ = ""; + /** + *
+     **
+     * The catalog name where the foreign table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string fk_catalog = 4; + * @return Whether the fkCatalog field is set. + */ + @java.lang.Override + public boolean hasFkCatalog() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + *
+     **
+     * The catalog name where the foreign table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string fk_catalog = 4; + * @return The fkCatalog. + */ + @java.lang.Override + public java.lang.String getFkCatalog() { + java.lang.Object ref = fkCatalog_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + fkCatalog_ = s; + return s; + } + } + /** + *
+     **
+     * The catalog name where the foreign table is.
+     * An empty string retrieves those without a catalog.
+     * If omitted the catalog name should not be used to narrow the search.
+     * 
+ * + * optional string fk_catalog = 4; + * @return The bytes for fkCatalog. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getFkCatalogBytes() { + java.lang.Object ref = fkCatalog_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fkCatalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int FK_DB_SCHEMA_FIELD_NUMBER = 5; + @SuppressWarnings("serial") + private volatile java.lang.Object fkDbSchema_ = ""; + /** + *
+     **
+     * The schema name where the foreign table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string fk_db_schema = 5; + * @return Whether the fkDbSchema field is set. + */ + @java.lang.Override + public boolean hasFkDbSchema() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + *
+     **
+     * The schema name where the foreign table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string fk_db_schema = 5; + * @return The fkDbSchema. + */ + @java.lang.Override + public java.lang.String getFkDbSchema() { + java.lang.Object ref = fkDbSchema_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + fkDbSchema_ = s; + return s; + } + } + /** + *
+     **
+     * The schema name where the foreign table is.
+     * An empty string retrieves those without a schema.
+     * If omitted the schema name should not be used to narrow the search.
+     * 
+ * + * optional string fk_db_schema = 5; + * @return The bytes for fkDbSchema. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getFkDbSchemaBytes() { + java.lang.Object ref = fkDbSchema_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fkDbSchema_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int FK_TABLE_FIELD_NUMBER = 6; + @SuppressWarnings("serial") + private volatile java.lang.Object fkTable_ = ""; + /** + *
+     **
+     * The foreign table name. It cannot be null.
+     * 
+ * + * string fk_table = 6; + * @return The fkTable. + */ + @java.lang.Override + public java.lang.String getFkTable() { + java.lang.Object ref = fkTable_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + fkTable_ = s; + return s; + } + } + /** + *
+     **
+     * The foreign table name. It cannot be null.
+     * 
+ * + * string fk_table = 6; + * @return The bytes for fkTable. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getFkTableBytes() { + java.lang.Object ref = fkTable_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fkTable_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, pkCatalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pkDbSchema_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pkTable_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pkTable_); + } + if (((bitField0_ & 0x00000004) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, fkCatalog_); + } + if (((bitField0_ & 0x00000008) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, fkDbSchema_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fkTable_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, fkTable_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, pkCatalog_); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pkDbSchema_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pkTable_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pkTable_); + } + if (((bitField0_ & 0x00000004) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, fkCatalog_); + } + if (((bitField0_ & 0x00000008) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, fkDbSchema_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fkTable_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, fkTable_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference) obj; + + if (hasPkCatalog() != other.hasPkCatalog()) return false; + if (hasPkCatalog()) { + if (!getPkCatalog() + .equals(other.getPkCatalog())) return false; + } + if (hasPkDbSchema() != other.hasPkDbSchema()) return false; + if (hasPkDbSchema()) { + if (!getPkDbSchema() + .equals(other.getPkDbSchema())) return false; + } + if (!getPkTable() + .equals(other.getPkTable())) return false; + if (hasFkCatalog() != other.hasFkCatalog()) return false; + if (hasFkCatalog()) { + if (!getFkCatalog() + .equals(other.getFkCatalog())) return false; + } + if (hasFkDbSchema() != other.hasFkDbSchema()) return false; + if (hasFkDbSchema()) { + if (!getFkDbSchema() + .equals(other.getFkDbSchema())) return false; + } + if (!getFkTable() + .equals(other.getFkTable())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return 
true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasPkCatalog()) { + hash = (37 * hash) + PK_CATALOG_FIELD_NUMBER; + hash = (53 * hash) + getPkCatalog().hashCode(); + } + if (hasPkDbSchema()) { + hash = (37 * hash) + PK_DB_SCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getPkDbSchema().hashCode(); + } + hash = (37 * hash) + PK_TABLE_FIELD_NUMBER; + hash = (53 * hash) + getPkTable().hashCode(); + if (hasFkCatalog()) { + hash = (37 * hash) + FK_CATALOG_FIELD_NUMBER; + hash = (53 * hash) + getFkCatalog().hashCode(); + } + if (hasFkDbSchema()) { + hash = (37 * hash) + FK_DB_SCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getFkDbSchema().hashCode(); + } + hash = (37 * hash) + FK_TABLE_FIELD_NUMBER; + hash = (53 * hash) + getFkTable().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a request to retrieve a description of the foreign key columns in the given foreign key table that
+     * reference the primary key or the columns representing a unique constraint of the parent table (could be the same
+     * or a different table) on a Flight SQL enabled backend.
+     * Used in the command member of FlightDescriptor for the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *  - GetFlightInfo: execute the catalog metadata request.
+     *
+     * The returned Arrow schema will be:
+     * <
+     *  pk_catalog_name: utf8,
+     *  pk_db_schema_name: utf8,
+     *  pk_table_name: utf8 not null,
+     *  pk_column_name: utf8 not null,
+     *  fk_catalog_name: utf8,
+     *  fk_db_schema_name: utf8,
+     *  fk_table_name: utf8 not null,
+     *  fk_column_name: utf8 not null,
+     *  key_sequence: int32 not null,
+     *  fk_key_name: utf8,
+     *  pk_key_name: utf8,
+     *  update_rule: uint8 not null,
+     *  delete_rule: uint8 not null
+     * >
+     * The returned data should be ordered by pk_catalog_name, pk_db_schema_name, pk_table_name, pk_key_name, then key_sequence.
+     * update_rule and delete_rule returns a byte that is equivalent to actions:
+     *    - 0 = CASCADE
+     *    - 1 = RESTRICT
+     *    - 2 = SET NULL
+     *    - 3 = NO ACTION
+     *    - 4 = SET DEFAULT
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandGetCrossReference} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandGetCrossReference) + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReferenceOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetCrossReference_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetCrossReference_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + pkCatalog_ = ""; + pkDbSchema_ = ""; + pkTable_ = ""; + fkCatalog_ = ""; + fkDbSchema_ = ""; + fkTable_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandGetCrossReference_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference.getDefaultInstance(); + } + + 
@java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.pkCatalog_ = pkCatalog_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.pkDbSchema_ = pkDbSchema_; + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.pkTable_ = pkTable_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.fkCatalog_ = fkCatalog_; + to_bitField0_ |= 0x00000004; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.fkDbSchema_ = fkDbSchema_; + to_bitField0_ |= 0x00000008; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.fkTable_ = fkTable_; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference other) { + if (other == 
org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference.getDefaultInstance()) return this; + if (other.hasPkCatalog()) { + pkCatalog_ = other.pkCatalog_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasPkDbSchema()) { + pkDbSchema_ = other.pkDbSchema_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (!other.getPkTable().isEmpty()) { + pkTable_ = other.pkTable_; + bitField0_ |= 0x00000004; + onChanged(); + } + if (other.hasFkCatalog()) { + fkCatalog_ = other.fkCatalog_; + bitField0_ |= 0x00000008; + onChanged(); + } + if (other.hasFkDbSchema()) { + fkDbSchema_ = other.fkDbSchema_; + bitField0_ |= 0x00000010; + onChanged(); + } + if (!other.getFkTable().isEmpty()) { + fkTable_ = other.fkTable_; + bitField0_ |= 0x00000020; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + pkCatalog_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + pkDbSchema_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + pkTable_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: { + fkCatalog_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000008; + break; + } // case 34 + case 42: { + fkDbSchema_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000010; + break; + } // case 42 + case 50: { + fkTable_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000020; + 
break; + } // case 50 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object pkCatalog_ = ""; + /** + *
+       **
+       * The catalog name where the parent table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string pk_catalog = 1; + * @return Whether the pkCatalog field is set. + */ + public boolean hasPkCatalog() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       **
+       * The catalog name where the parent table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string pk_catalog = 1; + * @return The pkCatalog. + */ + public java.lang.String getPkCatalog() { + java.lang.Object ref = pkCatalog_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pkCatalog_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       **
+       * The catalog name where the parent table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string pk_catalog = 1; + * @return The bytes for pkCatalog. + */ + public com.google.protobuf.ByteString + getPkCatalogBytes() { + java.lang.Object ref = pkCatalog_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pkCatalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       **
+       * The catalog name where the parent table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string pk_catalog = 1; + * @param value The pkCatalog to set. + * @return This builder for chaining. + */ + public Builder setPkCatalog( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + pkCatalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       **
+       * The catalog name where the parent table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string pk_catalog = 1; + * @return This builder for chaining. + */ + public Builder clearPkCatalog() { + pkCatalog_ = getDefaultInstance().getPkCatalog(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       **
+       * The catalog name where the parent table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string pk_catalog = 1; + * @param value The bytes for pkCatalog to set. + * @return This builder for chaining. + */ + public Builder setPkCatalogBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + pkCatalog_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.lang.Object pkDbSchema_ = ""; + /** + *
+       **
+       * The Schema name where the parent table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string pk_db_schema = 2; + * @return Whether the pkDbSchema field is set. + */ + public boolean hasPkDbSchema() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       **
+       * The Schema name where the parent table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string pk_db_schema = 2; + * @return The pkDbSchema. + */ + public java.lang.String getPkDbSchema() { + java.lang.Object ref = pkDbSchema_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pkDbSchema_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       **
+       * The Schema name where the parent table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string pk_db_schema = 2; + * @return The bytes for pkDbSchema. + */ + public com.google.protobuf.ByteString + getPkDbSchemaBytes() { + java.lang.Object ref = pkDbSchema_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pkDbSchema_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       **
+       * The Schema name where the parent table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string pk_db_schema = 2; + * @param value The pkDbSchema to set. + * @return This builder for chaining. + */ + public Builder setPkDbSchema( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + pkDbSchema_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       **
+       * The Schema name where the parent table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string pk_db_schema = 2; + * @return This builder for chaining. + */ + public Builder clearPkDbSchema() { + pkDbSchema_ = getDefaultInstance().getPkDbSchema(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       **
+       * The Schema name where the parent table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string pk_db_schema = 2; + * @param value The bytes for pkDbSchema to set. + * @return This builder for chaining. + */ + public Builder setPkDbSchemaBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + pkDbSchema_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private java.lang.Object pkTable_ = ""; + /** + *
+       **
+       * The parent table name. It cannot be null.
+       * 
+ * + * string pk_table = 3; + * @return The pkTable. + */ + public java.lang.String getPkTable() { + java.lang.Object ref = pkTable_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + pkTable_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       **
+       * The parent table name. It cannot be null.
+       * 
+ * + * string pk_table = 3; + * @return The bytes for pkTable. + */ + public com.google.protobuf.ByteString + getPkTableBytes() { + java.lang.Object ref = pkTable_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + pkTable_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       **
+       * The parent table name. It cannot be null.
+       * 
+ * + * string pk_table = 3; + * @param value The pkTable to set. + * @return This builder for chaining. + */ + public Builder setPkTable( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + pkTable_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       **
+       * The parent table name. It cannot be null.
+       * 
+ * + * string pk_table = 3; + * @return This builder for chaining. + */ + public Builder clearPkTable() { + pkTable_ = getDefaultInstance().getPkTable(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       **
+       * The parent table name. It cannot be null.
+       * 
+ * + * string pk_table = 3; + * @param value The bytes for pkTable to set. + * @return This builder for chaining. + */ + public Builder setPkTableBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + pkTable_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + + private java.lang.Object fkCatalog_ = ""; + /** + *
+       **
+       * The catalog name where the foreign table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string fk_catalog = 4; + * @return Whether the fkCatalog field is set. + */ + public boolean hasFkCatalog() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + *
+       **
+       * The catalog name where the foreign table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string fk_catalog = 4; + * @return The fkCatalog. + */ + public java.lang.String getFkCatalog() { + java.lang.Object ref = fkCatalog_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + fkCatalog_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       **
+       * The catalog name where the foreign table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string fk_catalog = 4; + * @return The bytes for fkCatalog. + */ + public com.google.protobuf.ByteString + getFkCatalogBytes() { + java.lang.Object ref = fkCatalog_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fkCatalog_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       **
+       * The catalog name where the foreign table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string fk_catalog = 4; + * @param value The fkCatalog to set. + * @return This builder for chaining. + */ + public Builder setFkCatalog( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + fkCatalog_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       **
+       * The catalog name where the foreign table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string fk_catalog = 4; + * @return This builder for chaining. + */ + public Builder clearFkCatalog() { + fkCatalog_ = getDefaultInstance().getFkCatalog(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + /** + *
+       **
+       * The catalog name where the foreign table is.
+       * An empty string retrieves those without a catalog.
+       * If omitted the catalog name should not be used to narrow the search.
+       * 
+ * + * optional string fk_catalog = 4; + * @param value The bytes for fkCatalog to set. + * @return This builder for chaining. + */ + public Builder setFkCatalogBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + fkCatalog_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + + private java.lang.Object fkDbSchema_ = ""; + /** + *
+       **
+       * The schema name where the foreign table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string fk_db_schema = 5; + * @return Whether the fkDbSchema field is set. + */ + public boolean hasFkDbSchema() { + return ((bitField0_ & 0x00000010) != 0); + } + /** + *
+       **
+       * The schema name where the foreign table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string fk_db_schema = 5; + * @return The fkDbSchema. + */ + public java.lang.String getFkDbSchema() { + java.lang.Object ref = fkDbSchema_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + fkDbSchema_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       **
+       * The schema name where the foreign table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string fk_db_schema = 5; + * @return The bytes for fkDbSchema. + */ + public com.google.protobuf.ByteString + getFkDbSchemaBytes() { + java.lang.Object ref = fkDbSchema_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fkDbSchema_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       **
+       * The schema name where the foreign table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string fk_db_schema = 5; + * @param value The fkDbSchema to set. + * @return This builder for chaining. + */ + public Builder setFkDbSchema( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + fkDbSchema_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + *
+       **
+       * The schema name where the foreign table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string fk_db_schema = 5; + * @return This builder for chaining. + */ + public Builder clearFkDbSchema() { + fkDbSchema_ = getDefaultInstance().getFkDbSchema(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + return this; + } + /** + *
+       **
+       * The schema name where the foreign table is.
+       * An empty string retrieves those without a schema.
+       * If omitted the schema name should not be used to narrow the search.
+       * 
+ * + * optional string fk_db_schema = 5; + * @param value The bytes for fkDbSchema to set. + * @return This builder for chaining. + */ + public Builder setFkDbSchemaBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + fkDbSchema_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + + private java.lang.Object fkTable_ = ""; + /** + *
+       **
+       * The foreign table name. It cannot be null.
+       * 
+ * + * string fk_table = 6; + * @return The fkTable. + */ + public java.lang.String getFkTable() { + java.lang.Object ref = fkTable_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + fkTable_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       **
+       * The foreign table name. It cannot be null.
+       * 
+ * + * string fk_table = 6; + * @return The bytes for fkTable. + */ + public com.google.protobuf.ByteString + getFkTableBytes() { + java.lang.Object ref = fkTable_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + fkTable_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       **
+       * The foreign table name. It cannot be null.
+       * 
+ * + * string fk_table = 6; + * @param value The fkTable to set. + * @return This builder for chaining. + */ + public Builder setFkTable( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + fkTable_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + *
+       **
+       * The foreign table name. It cannot be null.
+       * 
+ * + * string fk_table = 6; + * @return This builder for chaining. + */ + public Builder clearFkTable() { + fkTable_ = getDefaultInstance().getFkTable(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + return this; + } + /** + *
+       **
+       * The foreign table name. It cannot be null.
+       * 
+ * + * string fk_table = 6; + * @param value The bytes for fkTable to set. + * @return This builder for chaining. + */ + public Builder setFkTableBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + fkTable_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandGetCrossReference) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandGetCrossReference) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandGetCrossReference parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandGetCrossReference getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionCreatePreparedStatementRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionCreatePreparedStatementRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The valid SQL string to create a prepared statement for.
+     * 
+ * + * string query = 1; + * @return The query. + */ + java.lang.String getQuery(); + /** + *
+     * The valid SQL string to create a prepared statement for.
+     * 
+ * + * string query = 1; + * @return The bytes for query. + */ + com.google.protobuf.ByteString + getQueryBytes(); + + /** + *
+     * Create/execute the prepared statement as part of this transaction (if
+     * unset, executions of the prepared statement will be auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + boolean hasTransactionId(); + /** + *
+     * Create/execute the prepared statement as part of this transaction (if
+     * unset, executions of the prepared statement will be auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + com.google.protobuf.ByteString getTransactionId(); + } + /** + *
+   *
+   * Request message for the "CreatePreparedStatement" action on a Flight SQL enabled backend.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionCreatePreparedStatementRequest} + */ + public static final class ActionCreatePreparedStatementRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionCreatePreparedStatementRequest) + ActionCreatePreparedStatementRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionCreatePreparedStatementRequest.newBuilder() to construct. + private ActionCreatePreparedStatementRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionCreatePreparedStatementRequest() { + query_ = ""; + transactionId_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionCreatePreparedStatementRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest.Builder.class); + } + + private int bitField0_; + public static final int QUERY_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object query_ = ""; + /** + *
+     * The valid SQL string to create a prepared statement for.
+     * 
+ * + * string query = 1; + * @return The query. + */ + @java.lang.Override + public java.lang.String getQuery() { + java.lang.Object ref = query_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + query_ = s; + return s; + } + } + /** + *
+     * The valid SQL string to create a prepared statement for.
+     * 
+ * + * string query = 1; + * @return The bytes for query. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getQueryBytes() { + java.lang.Object ref = query_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + query_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TRANSACTION_ID_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Create/execute the prepared statement as part of this transaction (if
+     * unset, executions of the prepared statement will be auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + @java.lang.Override + public boolean hasTransactionId() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     * Create/execute the prepared statement as part of this transaction (if
+     * unset, executions of the prepared statement will be auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, query_); + } + if (((bitField0_ & 0x00000001) != 0)) { + output.writeBytes(2, transactionId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, query_); + } + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, transactionId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest) obj; + + if (!getQuery() + .equals(other.getQuery())) return false; + if (hasTransactionId() != other.hasTransactionId()) return false; + if (hasTransactionId()) { + if 
(!getTransactionId() + .equals(other.getTransactionId())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + QUERY_FIELD_NUMBER; + hash = (53 * hash) + getQuery().hashCode(); + if (hasTransactionId()) { + hash = (37 * hash) + TRANSACTION_ID_FIELD_NUMBER; + hash = (53 * hash) + getTransactionId().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest parseFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Request message for the "CreatePreparedStatement" action on a Flight SQL enabled backend.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionCreatePreparedStatementRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionCreatePreparedStatementRequest) + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + query_ = ""; + transactionId_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest getDefaultInstanceForType() { + return 
org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest build() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.query_ = query_; + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.transactionId_ = transactionId_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest.getDefaultInstance()) return this; + if (!other.getQuery().isEmpty()) { + query_ = other.query_; + bitField0_ |= 0x00000001; + onChanged(); + } + if 
(other.hasTransactionId()) { + setTransactionId(other.getTransactionId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + query_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + transactionId_ = input.readBytes(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object query_ = ""; + /** + *
+       * The valid SQL string to create a prepared statement for.
+       * 
+ * + * string query = 1; + * @return The query. + */ + public java.lang.String getQuery() { + java.lang.Object ref = query_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + query_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * The valid SQL string to create a prepared statement for.
+       * 
+ * + * string query = 1; + * @return The bytes for query. + */ + public com.google.protobuf.ByteString + getQueryBytes() { + java.lang.Object ref = query_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + query_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * The valid SQL string to create a prepared statement for.
+       * 
+ * + * string query = 1; + * @param value The query to set. + * @return This builder for chaining. + */ + public Builder setQuery( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + query_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The valid SQL string to create a prepared statement for.
+       * 
+ * + * string query = 1; + * @return This builder for chaining. + */ + public Builder clearQuery() { + query_ = getDefaultInstance().getQuery(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       * The valid SQL string to create a prepared statement for.
+       * 
+ * + * string query = 1; + * @param value The bytes for query to set. + * @return This builder for chaining. + */ + public Builder setQueryBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + query_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Create/execute the prepared statement as part of this transaction (if
+       * unset, executions of the prepared statement will be auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + @java.lang.Override + public boolean hasTransactionId() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       * Create/execute the prepared statement as part of this transaction (if
+       * unset, executions of the prepared statement will be auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + /** + *
+       * Create/execute the prepared statement as part of this transaction (if
+       * unset, executions of the prepared statement will be auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @param value The transactionId to set. + * @return This builder for chaining. + */ + public Builder setTransactionId(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + transactionId_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * Create/execute the prepared statement as part of this transaction (if
+       * unset, executions of the prepared statement will be auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return This builder for chaining. + */ + public Builder clearTransactionId() { + bitField0_ = (bitField0_ & ~0x00000002); + transactionId_ = getDefaultInstance().getTransactionId(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionCreatePreparedStatementRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionCreatePreparedStatementRequest) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionCreatePreparedStatementRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch 
(java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface SubstraitPlanOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.SubstraitPlan) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The serialized substrait.Plan to create a prepared statement for.
+     * XXX(ARROW-16902): this is bytes instead of an embedded message
+     * because Protobuf does not really support one DLL using Protobuf
+     * definitions from another DLL.
+     * 
+ * + * bytes plan = 1; + * @return The plan. + */ + com.google.protobuf.ByteString getPlan(); + + /** + *
+     * The Substrait release, e.g. "0.12.0". This information is not
+     * tracked in the plan itself, so this is the only way for consumers
+     * to potentially know if they can handle the plan.
+     * 
+ * + * string version = 2; + * @return The version. + */ + java.lang.String getVersion(); + /** + *
+     * The Substrait release, e.g. "0.12.0". This information is not
+     * tracked in the plan itself, so this is the only way for consumers
+     * to potentially know if they can handle the plan.
+     * 
+ * + * string version = 2; + * @return The bytes for version. + */ + com.google.protobuf.ByteString + getVersionBytes(); + } + /** + *
+   *
+   * An embedded message describing a Substrait plan to execute.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.SubstraitPlan} + */ + public static final class SubstraitPlan extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.SubstraitPlan) + SubstraitPlanOrBuilder { + private static final long serialVersionUID = 0L; + // Use SubstraitPlan.newBuilder() to construct. + private SubstraitPlan(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private SubstraitPlan() { + plan_ = com.google.protobuf.ByteString.EMPTY; + version_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new SubstraitPlan(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_SubstraitPlan_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_SubstraitPlan_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.class, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder.class); + } + + public static final int PLAN_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString plan_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * The serialized substrait.Plan to create a prepared statement for.
+     * XXX(ARROW-16902): this is bytes instead of an embedded message
+     * because Protobuf does not really support one DLL using Protobuf
+     * definitions from another DLL.
+     * 
+ * + * bytes plan = 1; + * @return The plan. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPlan() { + return plan_; + } + + public static final int VERSION_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object version_ = ""; + /** + *
+     * The Substrait release, e.g. "0.12.0". This information is not
+     * tracked in the plan itself, so this is the only way for consumers
+     * to potentially know if they can handle the plan.
+     * 
+ * + * string version = 2; + * @return The version. + */ + @java.lang.Override + public java.lang.String getVersion() { + java.lang.Object ref = version_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + version_ = s; + return s; + } + } + /** + *
+     * The Substrait release, e.g. "0.12.0". This information is not
+     * tracked in the plan itself, so this is the only way for consumers
+     * to potentially know if they can handle the plan.
+     * 
+ * + * string version = 2; + * @return The bytes for version. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getVersionBytes() { + java.lang.Object ref = version_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + version_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!plan_.isEmpty()) { + output.writeBytes(1, plan_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, version_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!plan_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, plan_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, version_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan other = (org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan) obj; + + if (!getPlan() + .equals(other.getPlan())) 
return false; + if (!getVersion() + .equals(other.getVersion())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PLAN_FIELD_NUMBER; + hash = (53 * hash) + getPlan().hashCode(); + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + 
public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * An embedded message describing a Substrait plan to execute.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.SubstraitPlan} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.SubstraitPlan) + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_SubstraitPlan_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_SubstraitPlan_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.class, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + plan_ = com.google.protobuf.ByteString.EMPTY; + version_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_SubstraitPlan_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan build() { + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan result = 
buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan result = new org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.plan_ = plan_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.version_ = version_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance()) return this; + if (other.getPlan() != com.google.protobuf.ByteString.EMPTY) { + setPlan(other.getPlan()); + } + if (!other.getVersion().isEmpty()) { + version_ = other.version_; + bitField0_ |= 0x00000002; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = 
input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + plan_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + version_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString plan_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * XXX(ARROW-16902): this is bytes instead of an embedded message
+       * because Protobuf does not really support one DLL using Protobuf
+       * definitions from another DLL.
+       * 
+ * + * bytes plan = 1; + * @return The plan. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPlan() { + return plan_; + } + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * XXX(ARROW-16902): this is bytes instead of an embedded message
+       * because Protobuf does not really support one DLL using Protobuf
+       * definitions from another DLL.
+       * 
+ * + * bytes plan = 1; + * @param value The plan to set. + * @return This builder for chaining. + */ + public Builder setPlan(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + plan_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * XXX(ARROW-16902): this is bytes instead of an embedded message
+       * because Protobuf does not really support one DLL using Protobuf
+       * definitions from another DLL.
+       * 
+ * + * bytes plan = 1; + * @return This builder for chaining. + */ + public Builder clearPlan() { + bitField0_ = (bitField0_ & ~0x00000001); + plan_ = getDefaultInstance().getPlan(); + onChanged(); + return this; + } + + private java.lang.Object version_ = ""; + /** + *
+       * The Substrait release, e.g. "0.12.0". This information is not
+       * tracked in the plan itself, so this is the only way for consumers
+       * to potentially know if they can handle the plan.
+       * 
+ * + * string version = 2; + * @return The version. + */ + public java.lang.String getVersion() { + java.lang.Object ref = version_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + version_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * The Substrait release, e.g. "0.12.0". This information is not
+       * tracked in the plan itself, so this is the only way for consumers
+       * to potentially know if they can handle the plan.
+       * 
+ * + * string version = 2; + * @return The bytes for version. + */ + public com.google.protobuf.ByteString + getVersionBytes() { + java.lang.Object ref = version_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + version_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * The Substrait release, e.g. "0.12.0". This information is not
+       * tracked in the plan itself, so this is the only way for consumers
+       * to potentially know if they can handle the plan.
+       * 
+ * + * string version = 2; + * @param value The version to set. + * @return This builder for chaining. + */ + public Builder setVersion( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + version_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * The Substrait release, e.g. "0.12.0". This information is not
+       * tracked in the plan itself, so this is the only way for consumers
+       * to potentially know if they can handle the plan.
+       * 
+ * + * string version = 2; + * @return This builder for chaining. + */ + public Builder clearVersion() { + version_ = getDefaultInstance().getVersion(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       * The Substrait release, e.g. "0.12.0". This information is not
+       * tracked in the plan itself, so this is the only way for consumers
+       * to potentially know if they can handle the plan.
+       * 
+ * + * string version = 2; + * @param value The bytes for version to set. + * @return This builder for chaining. + */ + public Builder setVersionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + version_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.SubstraitPlan) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.SubstraitPlan) + private static final org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public SubstraitPlan parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionCreatePreparedSubstraitPlanRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionCreatePreparedSubstraitPlanRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The serialized substrait.Plan to create a prepared statement for.
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return Whether the plan field is set. + */ + boolean hasPlan(); + /** + *
+     * The serialized substrait.Plan to create a prepared statement for.
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return The plan. + */ + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan getPlan(); + /** + *
+     * The serialized substrait.Plan to create a prepared statement for.
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder getPlanOrBuilder(); + + /** + *
+     * Create/execute the prepared statement as part of this transaction (if
+     * unset, executions of the prepared statement will be auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + boolean hasTransactionId(); + /** + *
+     * Create/execute the prepared statement as part of this transaction (if
+     * unset, executions of the prepared statement will be auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + com.google.protobuf.ByteString getTransactionId(); + } + /** + *
+   *
+   * Request message for the "CreatePreparedSubstraitPlan" action on a Flight SQL enabled backend.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionCreatePreparedSubstraitPlanRequest} + */ + public static final class ActionCreatePreparedSubstraitPlanRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionCreatePreparedSubstraitPlanRequest) + ActionCreatePreparedSubstraitPlanRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionCreatePreparedSubstraitPlanRequest.newBuilder() to construct. + private ActionCreatePreparedSubstraitPlanRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionCreatePreparedSubstraitPlanRequest() { + transactionId_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionCreatePreparedSubstraitPlanRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedSubstraitPlanRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedSubstraitPlanRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest.Builder.class); + } + + private int bitField0_; + public static final int PLAN_FIELD_NUMBER = 1; + private org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan plan_; + /** + *
+     * The serialized substrait.Plan to create a prepared statement for.
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return Whether the plan field is set. + */ + @java.lang.Override + public boolean hasPlan() { + return plan_ != null; + } + /** + *
+     * The serialized substrait.Plan to create a prepared statement for.
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return The plan. + */ + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan getPlan() { + return plan_ == null ? org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance() : plan_; + } + /** + *
+     * The serialized substrait.Plan to create a prepared statement for.
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder getPlanOrBuilder() { + return plan_ == null ? org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance() : plan_; + } + + public static final int TRANSACTION_ID_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Create/execute the prepared statement as part of this transaction (if
+     * unset, executions of the prepared statement will be auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + @java.lang.Override + public boolean hasTransactionId() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     * Create/execute the prepared statement as part of this transaction (if
+     * unset, executions of the prepared statement will be auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (plan_ != null) { + output.writeMessage(1, getPlan()); + } + if (((bitField0_ & 0x00000001) != 0)) { + output.writeBytes(2, transactionId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (plan_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getPlan()); + } + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, transactionId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest) obj; + + if (hasPlan() != other.hasPlan()) return false; + if (hasPlan()) { + if (!getPlan() + .equals(other.getPlan())) return false; + } + if (hasTransactionId() != other.hasTransactionId()) return false; + if (hasTransactionId()) { + if (!getTransactionId() + .equals(other.getTransactionId())) 
return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasPlan()) { + hash = (37 * hash) + PLAN_FIELD_NUMBER; + hash = (53 * hash) + getPlan().hashCode(); + } + if (hasTransactionId()) { + hash = (37 * hash) + TRANSACTION_ID_FIELD_NUMBER; + hash = (53 * hash) + getTransactionId().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + 
} + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest parseFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Request message for the "CreatePreparedSubstraitPlan" action on a Flight SQL enabled backend.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionCreatePreparedSubstraitPlanRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionCreatePreparedSubstraitPlanRequest) + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedSubstraitPlanRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedSubstraitPlanRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + plan_ = null; + if (planBuilder_ != null) { + planBuilder_.dispose(); + planBuilder_ = null; + } + transactionId_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedSubstraitPlanRequest_descriptor; + } + + @java.lang.Override + public 
org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest build() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.plan_ = planBuilder_ == null + ? 
plan_ + : planBuilder_.build(); + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.transactionId_ = transactionId_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest.getDefaultInstance()) return this; + if (other.hasPlan()) { + mergePlan(other.getPlan()); + } + if (other.hasTransactionId()) { + setTransactionId(other.getTransactionId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getPlanFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + transactionId_ = input.readBytes(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // 
while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan plan_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder> planBuilder_; + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return Whether the plan field is set. + */ + public boolean hasPlan() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return The plan. + */ + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan getPlan() { + if (planBuilder_ == null) { + return plan_ == null ? org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance() : plan_; + } else { + return planBuilder_.getMessage(); + } + } + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public Builder setPlan(org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan value) { + if (planBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + plan_ = value; + } else { + planBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public Builder setPlan( + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder builderForValue) { + if (planBuilder_ == null) { + plan_ = builderForValue.build(); + } else { + planBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public Builder mergePlan(org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan value) { + if (planBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + plan_ != null && + plan_ != org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance()) { + getPlanBuilder().mergeFrom(value); + } else { + plan_ = value; + } + } else { + planBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public Builder clearPlan() { + bitField0_ = (bitField0_ & ~0x00000001); + plan_ = null; + if (planBuilder_ != null) { + planBuilder_.dispose(); + planBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder getPlanBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getPlanFieldBuilder().getBuilder(); + } + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder getPlanOrBuilder() { + if (planBuilder_ != null) { + return planBuilder_.getMessageOrBuilder(); + } else { + return plan_ == null ? + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance() : plan_; + } + } + /** + *
+       * The serialized substrait.Plan to create a prepared statement for.
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder> + getPlanFieldBuilder() { + if (planBuilder_ == null) { + planBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder>( + getPlan(), + getParentForChildren(), + isClean()); + plan_ = null; + } + return planBuilder_; + } + + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Create/execute the prepared statement as part of this transaction (if
+       * unset, executions of the prepared statement will be auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + @java.lang.Override + public boolean hasTransactionId() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       * Create/execute the prepared statement as part of this transaction (if
+       * unset, executions of the prepared statement will be auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + /** + *
+       * Create/execute the prepared statement as part of this transaction (if
+       * unset, executions of the prepared statement will be auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @param value The transactionId to set. + * @return This builder for chaining. + */ + public Builder setTransactionId(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + transactionId_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * Create/execute the prepared statement as part of this transaction (if
+       * unset, executions of the prepared statement will be auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return This builder for chaining. + */ + public Builder clearTransactionId() { + bitField0_ = (bitField0_ & ~0x00000002); + transactionId_ = getDefaultInstance().getTransactionId(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionCreatePreparedSubstraitPlanRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionCreatePreparedSubstraitPlanRequest) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionCreatePreparedSubstraitPlanRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + 
} catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedSubstraitPlanRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionCreatePreparedStatementResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionCreatePreparedStatementResult) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Opaque handle for the prepared statement on the server.
+     * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + com.google.protobuf.ByteString getPreparedStatementHandle(); + + /** + *
+     * If a result set generating query was provided, dataset_schema contains the
+     * schema of the result set.  It should be an IPC-encapsulated Schema, as described in Schema.fbs.
+     * For some queries, the schema of the results may depend on the schema of the parameters.  The server
+     * should provide its best guess as to the schema at this point.  Clients must not assume that this
+     * schema, if provided, will be accurate.
+     * 
+ * + * bytes dataset_schema = 2; + * @return The datasetSchema. + */ + com.google.protobuf.ByteString getDatasetSchema(); + + /** + *
+     * If the query provided contained parameters, parameter_schema contains the
+     * schema of the expected parameters.  It should be an IPC-encapsulated Schema, as described in Schema.fbs.
+     * 
+ * + * bytes parameter_schema = 3; + * @return The parameterSchema. + */ + com.google.protobuf.ByteString getParameterSchema(); + } + /** + *
+   *
+   * Wrap the result of a "CreatePreparedStatement" or "CreatePreparedSubstraitPlan" action.
+   *
+   * The resultant PreparedStatement can be closed either:
+   * - Manually, through the "ClosePreparedStatement" action;
+   * - Automatically, by a server timeout.
+   *
+   * The result should be wrapped in a google.protobuf.Any message.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionCreatePreparedStatementResult} + */ + public static final class ActionCreatePreparedStatementResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionCreatePreparedStatementResult) + ActionCreatePreparedStatementResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionCreatePreparedStatementResult.newBuilder() to construct. + private ActionCreatePreparedStatementResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionCreatePreparedStatementResult() { + preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + datasetSchema_ = com.google.protobuf.ByteString.EMPTY; + parameterSchema_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionCreatePreparedStatementResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult.Builder.class); + } + + public static final int PREPARED_STATEMENT_HANDLE_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Opaque handle for the prepared statement on the server.
+     * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPreparedStatementHandle() { + return preparedStatementHandle_; + } + + public static final int DATASET_SCHEMA_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString datasetSchema_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * If a result set generating query was provided, dataset_schema contains the
+     * schema of the result set.  It should be an IPC-encapsulated Schema, as described in Schema.fbs.
+     * For some queries, the schema of the results may depend on the schema of the parameters.  The server
+     * should provide its best guess as to the schema at this point.  Clients must not assume that this
+     * schema, if provided, will be accurate.
+     * 
+ * + * bytes dataset_schema = 2; + * @return The datasetSchema. + */ + @java.lang.Override + public com.google.protobuf.ByteString getDatasetSchema() { + return datasetSchema_; + } + + public static final int PARAMETER_SCHEMA_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString parameterSchema_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * If the query provided contained parameters, parameter_schema contains the
+     * schema of the expected parameters.  It should be an IPC-encapsulated Schema, as described in Schema.fbs.
+     * 
+ * + * bytes parameter_schema = 3; + * @return The parameterSchema. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParameterSchema() { + return parameterSchema_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!preparedStatementHandle_.isEmpty()) { + output.writeBytes(1, preparedStatementHandle_); + } + if (!datasetSchema_.isEmpty()) { + output.writeBytes(2, datasetSchema_); + } + if (!parameterSchema_.isEmpty()) { + output.writeBytes(3, parameterSchema_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!preparedStatementHandle_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, preparedStatementHandle_); + } + if (!datasetSchema_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, datasetSchema_); + } + if (!parameterSchema_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, parameterSchema_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult) obj; + + if (!getPreparedStatementHandle() + 
.equals(other.getPreparedStatementHandle())) return false; + if (!getDatasetSchema() + .equals(other.getDatasetSchema())) return false; + if (!getParameterSchema() + .equals(other.getParameterSchema())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PREPARED_STATEMENT_HANDLE_FIELD_NUMBER; + hash = (53 * hash) + getPreparedStatementHandle().hashCode(); + hash = (37 * hash) + DATASET_SCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getDatasetSchema().hashCode(); + hash = (37 * hash) + PARAMETER_SCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getParameterSchema().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Wrap the result of a "CreatePreparedStatement" or "CreatePreparedSubstraitPlan" action.
+     *
+     * The resultant PreparedStatement can be closed either:
+     * - Manually, through the "ClosePreparedStatement" action;
+     * - Automatically, by a server timeout.
+     *
+     * The result should be wrapped in a google.protobuf.Any message.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionCreatePreparedStatementResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionCreatePreparedStatementResult) + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + datasetSchema_ = com.google.protobuf.ByteString.EMPTY; + parameterSchema_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementResult_descriptor; + } + + @java.lang.Override + public 
org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult build() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.preparedStatementHandle_ = preparedStatementHandle_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.datasetSchema_ = datasetSchema_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.parameterSchema_ = parameterSchema_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult other) { + if (other == 
org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult.getDefaultInstance()) return this; + if (other.getPreparedStatementHandle() != com.google.protobuf.ByteString.EMPTY) { + setPreparedStatementHandle(other.getPreparedStatementHandle()); + } + if (other.getDatasetSchema() != com.google.protobuf.ByteString.EMPTY) { + setDatasetSchema(other.getDatasetSchema()); + } + if (other.getParameterSchema() != com.google.protobuf.ByteString.EMPTY) { + setParameterSchema(other.getParameterSchema()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + preparedStatementHandle_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + datasetSchema_ = input.readBytes(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + parameterSchema_ = input.readBytes(); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPreparedStatementHandle() { + return preparedStatementHandle_; + } + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @param value The preparedStatementHandle to set. + * @return This builder for chaining. + */ + public Builder setPreparedStatementHandle(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + preparedStatementHandle_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @return This builder for chaining. + */ + public Builder clearPreparedStatementHandle() { + bitField0_ = (bitField0_ & ~0x00000001); + preparedStatementHandle_ = getDefaultInstance().getPreparedStatementHandle(); + onChanged(); + return this; + } + + private com.google.protobuf.ByteString datasetSchema_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * If a result set generating query was provided, dataset_schema contains the
+       * schema of the result set.  It should be an IPC-encapsulated Schema, as described in Schema.fbs.
+       * For some queries, the schema of the results may depend on the schema of the parameters.  The server
+       * should provide its best guess as to the schema at this point.  Clients must not assume that this
+       * schema, if provided, will be accurate.
+       * 
+ * + * bytes dataset_schema = 2; + * @return The datasetSchema. + */ + @java.lang.Override + public com.google.protobuf.ByteString getDatasetSchema() { + return datasetSchema_; + } + /** + *
+       * If a result set generating query was provided, dataset_schema contains the
+       * schema of the result set.  It should be an IPC-encapsulated Schema, as described in Schema.fbs.
+       * For some queries, the schema of the results may depend on the schema of the parameters.  The server
+       * should provide its best guess as to the schema at this point.  Clients must not assume that this
+       * schema, if provided, will be accurate.
+       * 
+ * + * bytes dataset_schema = 2; + * @param value The datasetSchema to set. + * @return This builder for chaining. + */ + public Builder setDatasetSchema(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + datasetSchema_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * If a result set generating query was provided, dataset_schema contains the
+       * schema of the result set.  It should be an IPC-encapsulated Schema, as described in Schema.fbs.
+       * For some queries, the schema of the results may depend on the schema of the parameters.  The server
+       * should provide its best guess as to the schema at this point.  Clients must not assume that this
+       * schema, if provided, will be accurate.
+       * 
+ * + * bytes dataset_schema = 2; + * @return This builder for chaining. + */ + public Builder clearDatasetSchema() { + bitField0_ = (bitField0_ & ~0x00000002); + datasetSchema_ = getDefaultInstance().getDatasetSchema(); + onChanged(); + return this; + } + + private com.google.protobuf.ByteString parameterSchema_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * If the query provided contained parameters, parameter_schema contains the
+       * schema of the expected parameters.  It should be an IPC-encapsulated Schema, as described in Schema.fbs.
+       * 
+ * + * bytes parameter_schema = 3; + * @return The parameterSchema. + */ + @java.lang.Override + public com.google.protobuf.ByteString getParameterSchema() { + return parameterSchema_; + } + /** + *
+       * If the query provided contained parameters, parameter_schema contains the
+       * schema of the expected parameters.  It should be an IPC-encapsulated Schema, as described in Schema.fbs.
+       * 
+ * + * bytes parameter_schema = 3; + * @param value The parameterSchema to set. + * @return This builder for chaining. + */ + public Builder setParameterSchema(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + parameterSchema_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * If the query provided contained parameters, parameter_schema contains the
+       * schema of the expected parameters.  It should be an IPC-encapsulated Schema, as described in Schema.fbs.
+       * 
+ * + * bytes parameter_schema = 3; + * @return This builder for chaining. + */ + public Builder clearParameterSchema() { + bitField0_ = (bitField0_ & ~0x00000004); + parameterSchema_ = getDefaultInstance().getParameterSchema(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionCreatePreparedStatementResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionCreatePreparedStatementResult) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionCreatePreparedStatementResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException 
e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCreatePreparedStatementResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionClosePreparedStatementRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionClosePreparedStatementRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Opaque handle for the prepared statement on the server.
+     * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + com.google.protobuf.ByteString getPreparedStatementHandle(); + } + /** + *
+   *
+   * Request message for the "ClosePreparedStatement" action on a Flight SQL enabled backend.
+   * Closes server resources associated with the prepared statement handle.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionClosePreparedStatementRequest} + */ + public static final class ActionClosePreparedStatementRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionClosePreparedStatementRequest) + ActionClosePreparedStatementRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionClosePreparedStatementRequest.newBuilder() to construct. + private ActionClosePreparedStatementRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionClosePreparedStatementRequest() { + preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionClosePreparedStatementRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionClosePreparedStatementRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionClosePreparedStatementRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest.Builder.class); + } + + public static final int PREPARED_STATEMENT_HANDLE_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Opaque handle for the prepared statement on the server.
+     * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPreparedStatementHandle() { + return preparedStatementHandle_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!preparedStatementHandle_.isEmpty()) { + output.writeBytes(1, preparedStatementHandle_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!preparedStatementHandle_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, preparedStatementHandle_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest) obj; + + if (!getPreparedStatementHandle() + .equals(other.getPreparedStatementHandle())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + 
PREPARED_STATEMENT_HANDLE_FIELD_NUMBER; + hash = (53 * hash) + getPreparedStatementHandle().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseFrom(java.io.InputStream input) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder 
newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Request message for the "ClosePreparedStatement" action on a Flight SQL enabled backend.
+     * Closes server resources associated with the prepared statement handle.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionClosePreparedStatementRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionClosePreparedStatementRequest) + org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionClosePreparedStatementRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionClosePreparedStatementRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionClosePreparedStatementRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest getDefaultInstanceForType() { + return 
org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest build() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.preparedStatementHandle_ = preparedStatementHandle_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest.getDefaultInstance()) return this; + if (other.getPreparedStatementHandle() != com.google.protobuf.ByteString.EMPTY) { + setPreparedStatementHandle(other.getPreparedStatementHandle()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final 
boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + preparedStatementHandle_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPreparedStatementHandle() { + return preparedStatementHandle_; + } + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @param value The preparedStatementHandle to set. + * @return This builder for chaining. + */ + public Builder setPreparedStatementHandle(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + preparedStatementHandle_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @return This builder for chaining. + */ + public Builder clearPreparedStatementHandle() { + bitField0_ = (bitField0_ & ~0x00000001); + preparedStatementHandle_ = getDefaultInstance().getPreparedStatementHandle(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionClosePreparedStatementRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionClosePreparedStatementRequest) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionClosePreparedStatementRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); 
+ } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionClosePreparedStatementRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionBeginTransactionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionBeginTransactionRequest) + com.google.protobuf.MessageOrBuilder { + } + /** + *
+   *
+   * Request message for the "BeginTransaction" action.
+   * Begins a transaction.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionBeginTransactionRequest} + */ + public static final class ActionBeginTransactionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionBeginTransactionRequest) + ActionBeginTransactionRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionBeginTransactionRequest.newBuilder() to construct. + private ActionBeginTransactionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionBeginTransactionRequest() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionBeginTransactionRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginTransactionRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginTransactionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest.Builder.class); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getUnknownFields().writeTo(output); + } + + 
@java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest) obj; + + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { 
+ return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Request message for the "BeginTransaction" action.
+     * Begins a transaction.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionBeginTransactionRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionBeginTransactionRequest) + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginTransactionRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginTransactionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginTransactionRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest build() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest(this); + onBuilt(); + return result; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionBeginTransactionRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionBeginTransactionRequest) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionBeginTransactionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return 
builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionBeginSavepointRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionBeginSavepointRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The transaction to which a savepoint belongs.
+     * 
+ * + * bytes transaction_id = 1; + * @return The transactionId. + */ + com.google.protobuf.ByteString getTransactionId(); + + /** + *
+     * Name for the savepoint.
+     * 
+ * + * string name = 2; + * @return The name. + */ + java.lang.String getName(); + /** + *
+     * Name for the savepoint.
+     * 
+ * + * string name = 2; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + } + /** + *
+   *
+   * Request message for the "BeginSavepoint" action.
+   * Creates a savepoint within a transaction.
+   *
+   * Only supported if FLIGHT_SQL_TRANSACTION is
+   * FLIGHT_SQL_TRANSACTION_SUPPORT_SAVEPOINT.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionBeginSavepointRequest} + */ + public static final class ActionBeginSavepointRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionBeginSavepointRequest) + ActionBeginSavepointRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionBeginSavepointRequest.newBuilder() to construct. + private ActionBeginSavepointRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionBeginSavepointRequest() { + transactionId_ = com.google.protobuf.ByteString.EMPTY; + name_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionBeginSavepointRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginSavepointRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginSavepointRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest.Builder.class); + } + + public static final int TRANSACTION_ID_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * The transaction to which a savepoint belongs.
+     * 
+ * + * bytes transaction_id = 1; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + + public static final int NAME_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + *
+     * Name for the savepoint.
+     * 
+ * + * string name = 2; + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+     * Name for the savepoint.
+     * 
+ * + * string name = 2; + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!transactionId_.isEmpty()) { + output.writeBytes(1, transactionId_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!transactionId_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, transactionId_); + } + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest) obj; 
+ + if (!getTransactionId() + .equals(other.getTransactionId())) return false; + if (!getName() + .equals(other.getName())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TRANSACTION_ID_FIELD_NUMBER; + hash = (53 * hash) + getTransactionId().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + 
} + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { 
+ return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Request message for the "BeginSavepoint" action.
+     * Creates a savepoint within a transaction.
+     *
+     * Only supported if FLIGHT_SQL_TRANSACTION is
+     * FLIGHT_SQL_TRANSACTION_SUPPORT_SAVEPOINT.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionBeginSavepointRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionBeginSavepointRequest) + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginSavepointRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginSavepointRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + transactionId_ = com.google.protobuf.ByteString.EMPTY; + name_ = ""; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginSavepointRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest.getDefaultInstance(); + } + + 
@java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest build() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.transactionId_ = transactionId_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.name_ = name_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest.getDefaultInstance()) return this; + if (other.getTransactionId() != com.google.protobuf.ByteString.EMPTY) { + setTransactionId(other.getTransactionId()); + } + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000002; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + 
@java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + transactionId_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * The transaction to which a savepoint belongs.
+       * 
+ * + * bytes transaction_id = 1; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + /** + *
+       * The transaction to which a savepoint belongs.
+       * 
+ * + * bytes transaction_id = 1; + * @param value The transactionId to set. + * @return This builder for chaining. + */ + public Builder setTransactionId(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + transactionId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The transaction to which a savepoint belongs.
+       * 
+ * + * bytes transaction_id = 1; + * @return This builder for chaining. + */ + public Builder clearTransactionId() { + bitField0_ = (bitField0_ & ~0x00000001); + transactionId_ = getDefaultInstance().getTransactionId(); + onChanged(); + return this; + } + + private java.lang.Object name_ = ""; + /** + *
+       * Name for the savepoint.
+       * 
+ * + * string name = 2; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Name for the savepoint.
+       * 
+ * + * string name = 2; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Name for the savepoint.
+       * 
+ * + * string name = 2; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * Name for the savepoint.
+       * 
+ * + * string name = 2; + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + *
+       * Name for the savepoint.
+       * 
+ * + * string name = 2; + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionBeginSavepointRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionBeginSavepointRequest) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionBeginSavepointRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionBeginTransactionResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionBeginTransactionResult) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Opaque handle for the transaction on the server.
+     * 
+ * + * bytes transaction_id = 1; + * @return The transactionId. + */ + com.google.protobuf.ByteString getTransactionId(); + } + /** + *
+   *
+   * The result of a "BeginTransaction" action.
+   *
+   * The transaction can be manipulated with the "EndTransaction" action, or
+   * automatically via server timeout. If the transaction times out, then it is
+   * automatically rolled back.
+   *
+   * The result should be wrapped in a google.protobuf.Any message.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionBeginTransactionResult} + */ + public static final class ActionBeginTransactionResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionBeginTransactionResult) + ActionBeginTransactionResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionBeginTransactionResult.newBuilder() to construct. + private ActionBeginTransactionResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionBeginTransactionResult() { + transactionId_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionBeginTransactionResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginTransactionResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginTransactionResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult.Builder.class); + } + + public static final int TRANSACTION_ID_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Opaque handle for the transaction on the server.
+     * 
+ * + * bytes transaction_id = 1; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!transactionId_.isEmpty()) { + output.writeBytes(1, transactionId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!transactionId_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, transactionId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult) obj; + + if (!getTransactionId() + .equals(other.getTransactionId())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TRANSACTION_ID_FIELD_NUMBER; + hash = (53 * hash) + getTransactionId().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode 
= hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseFrom( + java.io.InputStream input, 
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * The result of a "BeginTransaction" action.
+     *
+     * The transaction can be manipulated with the "EndTransaction" action, or
+     * automatically via server timeout. If the transaction times out, then it is
+     * automatically rolled back.
+     *
+     * The result should be wrapped in a google.protobuf.Any message.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionBeginTransactionResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionBeginTransactionResult) + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginTransactionResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginTransactionResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + transactionId_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginTransactionResult_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult.getDefaultInstance(); + } + + 
@java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult build() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.transactionId_ = transactionId_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult.getDefaultInstance()) return this; + if (other.getTransactionId() != com.google.protobuf.ByteString.EMPTY) { + setTransactionId(other.getTransactionId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException 
{ + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + transactionId_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Opaque handle for the transaction on the server.
+       * 
+ * + * bytes transaction_id = 1; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + /** + *
+       * Opaque handle for the transaction on the server.
+       * 
+ * + * bytes transaction_id = 1; + * @param value The transactionId to set. + * @return This builder for chaining. + */ + public Builder setTransactionId(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + transactionId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * Opaque handle for the transaction on the server.
+       * 
+ * + * bytes transaction_id = 1; + * @return This builder for chaining. + */ + public Builder clearTransactionId() { + bitField0_ = (bitField0_ & ~0x00000001); + transactionId_ = getDefaultInstance().getTransactionId(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionBeginTransactionResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionBeginTransactionResult) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionBeginTransactionResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginTransactionResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionBeginSavepointResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionBeginSavepointResult) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Opaque handle for the savepoint on the server.
+     * 
+ * + * bytes savepoint_id = 1; + * @return The savepointId. + */ + com.google.protobuf.ByteString getSavepointId(); + } + /** + *
+   *
+   * The result of a "BeginSavepoint" action.
+   *
+   * The transaction can be manipulated with the "EndSavepoint" action.
+   * If the associated transaction is committed, rolled back, or times
+   * out, then the savepoint is also invalidated.
+   *
+   * The result should be wrapped in a google.protobuf.Any message.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionBeginSavepointResult} + */ + public static final class ActionBeginSavepointResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionBeginSavepointResult) + ActionBeginSavepointResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionBeginSavepointResult.newBuilder() to construct. + private ActionBeginSavepointResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionBeginSavepointResult() { + savepointId_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionBeginSavepointResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginSavepointResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginSavepointResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult.Builder.class); + } + + public static final int SAVEPOINT_ID_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString savepointId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Opaque handle for the savepoint on the server.
+     * 
+ * + * bytes savepoint_id = 1; + * @return The savepointId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getSavepointId() { + return savepointId_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!savepointId_.isEmpty()) { + output.writeBytes(1, savepointId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!savepointId_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, savepointId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult) obj; + + if (!getSavepointId() + .equals(other.getSavepointId())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + SAVEPOINT_ID_FIELD_NUMBER; + hash = (53 * hash) + getSavepointId().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + 
+ public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * The result of a "BeginSavepoint" action.
+     *
+     * The transaction can be manipulated with the "EndSavepoint" action.
+     * If the associated transaction is committed, rolled back, or times
+     * out, then the savepoint is also invalidated.
+     *
+     * The result should be wrapped in a google.protobuf.Any message.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionBeginSavepointResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionBeginSavepointResult) + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginSavepointResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginSavepointResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + savepointId_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionBeginSavepointResult_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult build() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.savepointId_ = savepointId_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult.getDefaultInstance()) return this; + if (other.getSavepointId() != com.google.protobuf.ByteString.EMPTY) { + setSavepointId(other.getSavepointId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new 
java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + savepointId_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString savepointId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Opaque handle for the savepoint on the server.
+       * 
+ * + * bytes savepoint_id = 1; + * @return The savepointId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getSavepointId() { + return savepointId_; + } + /** + *
+       * Opaque handle for the savepoint on the server.
+       * 
+ * + * bytes savepoint_id = 1; + * @param value The savepointId to set. + * @return This builder for chaining. + */ + public Builder setSavepointId(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + savepointId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * Opaque handle for the savepoint on the server.
+       * 
+ * + * bytes savepoint_id = 1; + * @return This builder for chaining. + */ + public Builder clearSavepointId() { + bitField0_ = (bitField0_ & ~0x00000001); + savepointId_ = getDefaultInstance().getSavepointId(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionBeginSavepointResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionBeginSavepointResult) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionBeginSavepointResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) 
+ .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionBeginSavepointResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionEndTransactionRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionEndTransactionRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Opaque handle for the transaction on the server.
+     * 
+ * + * bytes transaction_id = 1; + * @return The transactionId. + */ + com.google.protobuf.ByteString getTransactionId(); + + /** + *
+     * Whether to commit/rollback the given transaction.
+     * 
+ * + * .arrow.flight.protocol.sql.ActionEndTransactionRequest.EndTransaction action = 2; + * @return The enum numeric value on the wire for action. + */ + int getActionValue(); + /** + *
+     * Whether to commit/rollback the given transaction.
+     * 
+ * + * .arrow.flight.protocol.sql.ActionEndTransactionRequest.EndTransaction action = 2; + * @return The action. + */ + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction getAction(); + } + /** + *
+   *
+   * Request message for the "EndTransaction" action.
+   *
+   * Commit (COMMIT) or rollback (ROLLBACK) the transaction.
+   *
+   * If the action completes successfully, the transaction handle is
+   * invalidated, as are all associated savepoints.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionEndTransactionRequest} + */ + public static final class ActionEndTransactionRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionEndTransactionRequest) + ActionEndTransactionRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionEndTransactionRequest.newBuilder() to construct. + private ActionEndTransactionRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionEndTransactionRequest() { + transactionId_ = com.google.protobuf.ByteString.EMPTY; + action_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionEndTransactionRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionEndTransactionRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionEndTransactionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.Builder.class); + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.ActionEndTransactionRequest.EndTransaction} + */ + public enum EndTransaction + implements com.google.protobuf.ProtocolMessageEnum { + /** + * END_TRANSACTION_UNSPECIFIED = 0; + */ + END_TRANSACTION_UNSPECIFIED(0), + /** + *
+       * Commit the transaction.
+       * 
+ * + * END_TRANSACTION_COMMIT = 1; + */ + END_TRANSACTION_COMMIT(1), + /** + *
+       * Roll back the transaction.
+       * 
+ * + * END_TRANSACTION_ROLLBACK = 2; + */ + END_TRANSACTION_ROLLBACK(2), + UNRECOGNIZED(-1), + ; + + /** + * END_TRANSACTION_UNSPECIFIED = 0; + */ + public static final int END_TRANSACTION_UNSPECIFIED_VALUE = 0; + /** + *
+       * Commit the transaction.
+       * 
+ * + * END_TRANSACTION_COMMIT = 1; + */ + public static final int END_TRANSACTION_COMMIT_VALUE = 1; + /** + *
+       * Roll back the transaction.
+       * 
+ * + * END_TRANSACTION_ROLLBACK = 2; + */ + public static final int END_TRANSACTION_ROLLBACK_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static EndTransaction valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static EndTransaction forNumber(int value) { + switch (value) { + case 0: return END_TRANSACTION_UNSPECIFIED; + case 1: return END_TRANSACTION_COMMIT; + case 2: return END_TRANSACTION_ROLLBACK; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + EndTransaction> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public EndTransaction findValueByNumber(int number) { + return EndTransaction.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.getDescriptor().getEnumTypes().get(0); + } + 
+ private static final EndTransaction[] VALUES = values(); + + public static EndTransaction valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private EndTransaction(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.ActionEndTransactionRequest.EndTransaction) + } + + public static final int TRANSACTION_ID_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Opaque handle for the transaction on the server.
+     * 
+ * + * bytes transaction_id = 1; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + + public static final int ACTION_FIELD_NUMBER = 2; + private int action_ = 0; + /** + *
+     * Whether to commit/rollback the given transaction.
+     * 
+ * + * .arrow.flight.protocol.sql.ActionEndTransactionRequest.EndTransaction action = 2; + * @return The enum numeric value on the wire for action. + */ + @java.lang.Override public int getActionValue() { + return action_; + } + /** + *
+     * Whether to commit/rollback the given transaction.
+     * 
+ * + * .arrow.flight.protocol.sql.ActionEndTransactionRequest.EndTransaction action = 2; + * @return The action. + */ + @java.lang.Override public org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction getAction() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction result = org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction.forNumber(action_); + return result == null ? org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!transactionId_.isEmpty()) { + output.writeBytes(1, transactionId_); + } + if (action_ != org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction.END_TRANSACTION_UNSPECIFIED.getNumber()) { + output.writeEnum(2, action_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!transactionId_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, transactionId_); + } + if (action_ != org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction.END_TRANSACTION_UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, action_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + 
if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest) obj; + + if (!getTransactionId() + .equals(other.getTransactionId())) return false; + if (action_ != other.action_) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TRANSACTION_ID_FIELD_NUMBER; + hash = (53 * hash) + getTransactionId().hashCode(); + hash = (37 * hash) + ACTION_FIELD_NUMBER; + hash = (53 * hash) + action_; + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { 
+ return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Request message for the "EndTransaction" action.
+     *
+     * Commit (COMMIT) or rollback (ROLLBACK) the transaction.
+     *
+     * If the action completes successfully, the transaction handle is
+     * invalidated, as are all associated savepoints.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionEndTransactionRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionEndTransactionRequest) + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionEndTransactionRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionEndTransactionRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + transactionId_ = com.google.protobuf.ByteString.EMPTY; + action_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionEndTransactionRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.getDefaultInstance(); + } + + 
@java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest build() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.transactionId_ = transactionId_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.action_ = action_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.getDefaultInstance()) return this; + if (other.getTransactionId() != com.google.protobuf.ByteString.EMPTY) { + setTransactionId(other.getTransactionId()); + } + if (other.action_ != 0) { + setActionValue(other.getActionValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder 
mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + transactionId_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 16: { + action_ = input.readEnum(); + bitField0_ |= 0x00000002; + break; + } // case 16 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Opaque handle for the transaction on the server.
+       * 
+ * + * bytes transaction_id = 1; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + /** + *
+       * Opaque handle for the transaction on the server.
+       * 
+ * + * bytes transaction_id = 1; + * @param value The transactionId to set. + * @return This builder for chaining. + */ + public Builder setTransactionId(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + transactionId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * Opaque handle for the transaction on the server.
+       * 
+ * + * bytes transaction_id = 1; + * @return This builder for chaining. + */ + public Builder clearTransactionId() { + bitField0_ = (bitField0_ & ~0x00000001); + transactionId_ = getDefaultInstance().getTransactionId(); + onChanged(); + return this; + } + + private int action_ = 0; + /** + *
+       * Whether to commit/rollback the given transaction.
+       * 
+ * + * .arrow.flight.protocol.sql.ActionEndTransactionRequest.EndTransaction action = 2; + * @return The enum numeric value on the wire for action. + */ + @java.lang.Override public int getActionValue() { + return action_; + } + /** + *
+       * Whether to commit/rollback the given transaction.
+       * 
+ * + * .arrow.flight.protocol.sql.ActionEndTransactionRequest.EndTransaction action = 2; + * @param value The enum numeric value on the wire for action to set. + * @return This builder for chaining. + */ + public Builder setActionValue(int value) { + action_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * Whether to commit/rollback the given transaction.
+       * 
+ * + * .arrow.flight.protocol.sql.ActionEndTransactionRequest.EndTransaction action = 2; + * @return The action. + */ + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction getAction() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction result = org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction.forNumber(action_); + return result == null ? org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction.UNRECOGNIZED : result; + } + /** + *
+       * Whether to commit/rollback the given transaction.
+       * 
+ * + * .arrow.flight.protocol.sql.ActionEndTransactionRequest.EndTransaction action = 2; + * @param value The action to set. + * @return This builder for chaining. + */ + public Builder setAction(org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest.EndTransaction value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + action_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+       * Whether to commit/rollback the given transaction.
+       * 
+ * + * .arrow.flight.protocol.sql.ActionEndTransactionRequest.EndTransaction action = 2; + * @return This builder for chaining. + */ + public Builder clearAction() { + bitField0_ = (bitField0_ & ~0x00000002); + action_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionEndTransactionRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionEndTransactionRequest) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionEndTransactionRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionEndTransactionRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ActionEndSavepointRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionEndSavepointRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Opaque handle for the savepoint on the server.
+     * 
+ * + * bytes savepoint_id = 1; + * @return The savepointId. + */ + com.google.protobuf.ByteString getSavepointId(); + + /** + *
+     * Whether to rollback/release the given savepoint.
+     * 
+ * + * .arrow.flight.protocol.sql.ActionEndSavepointRequest.EndSavepoint action = 2; + * @return The enum numeric value on the wire for action. + */ + int getActionValue(); + /** + *
+     * Whether to rollback/release the given savepoint.
+     * 
+ * + * .arrow.flight.protocol.sql.ActionEndSavepointRequest.EndSavepoint action = 2; + * @return The action. + */ + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint getAction(); + } + /** + *
+   *
+   * Request message for the "EndSavepoint" action.
+   *
+   * Release (RELEASE) the savepoint or rollback (ROLLBACK) to the
+   * savepoint.
+   *
+   * Releasing a savepoint invalidates that savepoint.  Rolling back to
+   * a savepoint does not invalidate the savepoint, but invalidates all
+   * savepoints created after the current savepoint.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionEndSavepointRequest} + */ + public static final class ActionEndSavepointRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionEndSavepointRequest) + ActionEndSavepointRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionEndSavepointRequest.newBuilder() to construct. + private ActionEndSavepointRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionEndSavepointRequest() { + savepointId_ = com.google.protobuf.ByteString.EMPTY; + action_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionEndSavepointRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionEndSavepointRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionEndSavepointRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.Builder.class); + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.ActionEndSavepointRequest.EndSavepoint} + */ + public enum EndSavepoint + implements com.google.protobuf.ProtocolMessageEnum { + /** + * END_SAVEPOINT_UNSPECIFIED = 0; + */ + END_SAVEPOINT_UNSPECIFIED(0), + /** + *
+       * Release the savepoint.
+       * 
+ * + * END_SAVEPOINT_RELEASE = 1; + */ + END_SAVEPOINT_RELEASE(1), + /** + *
+       * Roll back to a savepoint.
+       * 
+ * + * END_SAVEPOINT_ROLLBACK = 2; + */ + END_SAVEPOINT_ROLLBACK(2), + UNRECOGNIZED(-1), + ; + + /** + * END_SAVEPOINT_UNSPECIFIED = 0; + */ + public static final int END_SAVEPOINT_UNSPECIFIED_VALUE = 0; + /** + *
+       * Release the savepoint.
+       * 
+ * + * END_SAVEPOINT_RELEASE = 1; + */ + public static final int END_SAVEPOINT_RELEASE_VALUE = 1; + /** + *
+       * Roll back to a savepoint.
+       * 
+ * + * END_SAVEPOINT_ROLLBACK = 2; + */ + public static final int END_SAVEPOINT_ROLLBACK_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static EndSavepoint valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static EndSavepoint forNumber(int value) { + switch (value) { + case 0: return END_SAVEPOINT_UNSPECIFIED; + case 1: return END_SAVEPOINT_RELEASE; + case 2: return END_SAVEPOINT_ROLLBACK; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + EndSavepoint> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public EndSavepoint findValueByNumber(int number) { + return EndSavepoint.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.getDescriptor().getEnumTypes().get(0); + } + + private static 
final EndSavepoint[] VALUES = values(); + + public static EndSavepoint valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private EndSavepoint(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.ActionEndSavepointRequest.EndSavepoint) + } + + public static final int SAVEPOINT_ID_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString savepointId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Opaque handle for the savepoint on the server.
+     * 
+ * + * bytes savepoint_id = 1; + * @return The savepointId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getSavepointId() { + return savepointId_; + } + + public static final int ACTION_FIELD_NUMBER = 2; + private int action_ = 0; + /** + *
+     * Whether to rollback/release the given savepoint.
+     * 
+ * + * .arrow.flight.protocol.sql.ActionEndSavepointRequest.EndSavepoint action = 2; + * @return The enum numeric value on the wire for action. + */ + @java.lang.Override public int getActionValue() { + return action_; + } + /** + *
+     * Whether to rollback/release the given savepoint.
+     * 
+ * + * .arrow.flight.protocol.sql.ActionEndSavepointRequest.EndSavepoint action = 2; + * @return The action. + */ + @java.lang.Override public org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint getAction() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint result = org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint.forNumber(action_); + return result == null ? org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!savepointId_.isEmpty()) { + output.writeBytes(1, savepointId_); + } + if (action_ != org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint.END_SAVEPOINT_UNSPECIFIED.getNumber()) { + output.writeEnum(2, action_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!savepointId_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, savepointId_); + } + if (action_ != org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint.END_SAVEPOINT_UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, action_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest) obj; + + if (!getSavepointId() + .equals(other.getSavepointId())) return false; + if (action_ != other.action_) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + SAVEPOINT_ID_FIELD_NUMBER; + hash = (53 * hash) + getSavepointId().hashCode(); + hash = (37 * hash) + ACTION_FIELD_NUMBER; + hash = (53 * hash) + action_; + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, 
extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Request message for the "EndSavepoint" action.
+     *
+     * Release (RELEASE) the savepoint or rollback (ROLLBACK) to the
+     * savepoint.
+     *
+     * Releasing a savepoint invalidates that savepoint.  Rolling back to
+     * a savepoint does not invalidate the savepoint, but invalidates all
+     * savepoints created after the current savepoint.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionEndSavepointRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionEndSavepointRequest) + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionEndSavepointRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionEndSavepointRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + savepointId_ = com.google.protobuf.ByteString.EMPTY; + action_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionEndSavepointRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest build() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.savepointId_ = savepointId_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.action_ = action_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.getDefaultInstance()) return this; + if (other.getSavepointId() != com.google.protobuf.ByteString.EMPTY) { + setSavepointId(other.getSavepointId()); + } + if (other.action_ != 0) { + setActionValue(other.getActionValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + savepointId_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 16: { + action_ = input.readEnum(); + bitField0_ |= 0x00000002; + break; + } // case 16 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString savepointId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Opaque handle for the savepoint on the server.
+       * 
+ * + * bytes savepoint_id = 1; + * @return The savepointId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getSavepointId() { + return savepointId_; + } + /** + *
+       * Opaque handle for the savepoint on the server.
+       * 
+ * + * bytes savepoint_id = 1; + * @param value The savepointId to set. + * @return This builder for chaining. + */ + public Builder setSavepointId(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + savepointId_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * Opaque handle for the savepoint on the server.
+       * 
+ * + * bytes savepoint_id = 1; + * @return This builder for chaining. + */ + public Builder clearSavepointId() { + bitField0_ = (bitField0_ & ~0x00000001); + savepointId_ = getDefaultInstance().getSavepointId(); + onChanged(); + return this; + } + + private int action_ = 0; + /** + *
+       * Whether to rollback/release the given savepoint.
+       * 
+ * + * .arrow.flight.protocol.sql.ActionEndSavepointRequest.EndSavepoint action = 2; + * @return The enum numeric value on the wire for action. + */ + @java.lang.Override public int getActionValue() { + return action_; + } + /** + *
+       * Whether to rollback/release the given savepoint.
+       * 
+ * + * .arrow.flight.protocol.sql.ActionEndSavepointRequest.EndSavepoint action = 2; + * @param value The enum numeric value on the wire for action to set. + * @return This builder for chaining. + */ + public Builder setActionValue(int value) { + action_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * Whether to rollback/release the given savepoint.
+       * 
+ * + * .arrow.flight.protocol.sql.ActionEndSavepointRequest.EndSavepoint action = 2; + * @return The action. + */ + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint getAction() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint result = org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint.forNumber(action_); + return result == null ? org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint.UNRECOGNIZED : result; + } + /** + *
+       * Whether to rollback/release the given savepoint.
+       * 
+ * + * .arrow.flight.protocol.sql.ActionEndSavepointRequest.EndSavepoint action = 2; + * @param value The action to set. + * @return This builder for chaining. + */ + public Builder setAction(org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest.EndSavepoint value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + action_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+       * Whether to rollback/release the given savepoint.
+       * 
+ * + * .arrow.flight.protocol.sql.ActionEndSavepointRequest.EndSavepoint action = 2; + * @return This builder for chaining. + */ + public Builder clearAction() { + bitField0_ = (bitField0_ & ~0x00000002); + action_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionEndSavepointRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionEndSavepointRequest) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionEndSavepointRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionEndSavepointRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandStatementQueryOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandStatementQuery) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The SQL syntax.
+     * 
+ * + * string query = 1; + * @return The query. + */ + java.lang.String getQuery(); + /** + *
+     * The SQL syntax.
+     * 
+ * + * string query = 1; + * @return The bytes for query. + */ + com.google.protobuf.ByteString + getQueryBytes(); + + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + boolean hasTransactionId(); + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + com.google.protobuf.ByteString getTransactionId(); + } + /** + *
+   *
+   * Represents a SQL query. Used in the command member of FlightDescriptor
+   * for the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *    Fields on this schema may contain the following metadata:
+   *    - ARROW:FLIGHT:SQL:CATALOG_NAME      - Table's catalog name
+   *    - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME    - Database schema name
+   *    - ARROW:FLIGHT:SQL:TABLE_NAME        - Table name
+   *    - ARROW:FLIGHT:SQL:TYPE_NAME         - The data source-specific name for the data type of the column.
+   *    - ARROW:FLIGHT:SQL:PRECISION         - Column precision/size
+   *    - ARROW:FLIGHT:SQL:SCALE             - Column scale/decimal digits if applicable
+   *    - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise.
+   *    - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise.
+   *    - ARROW:FLIGHT:SQL:IS_READ_ONLY      - "1" indicates if the column is read only, "0" otherwise.
+   *    - ARROW:FLIGHT:SQL:IS_SEARCHABLE     - "1" indicates if the column is searchable via WHERE clause, "0" otherwise.
+   *  - GetFlightInfo: execute the query.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandStatementQuery} + */ + public static final class CommandStatementQuery extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandStatementQuery) + CommandStatementQueryOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandStatementQuery.newBuilder() to construct. + private CommandStatementQuery(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandStatementQuery() { + query_ = ""; + transactionId_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandStatementQuery(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementQuery_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementQuery_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery.Builder.class); + } + + private int bitField0_; + public static final int QUERY_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object query_ = ""; + /** + *
+     * The SQL syntax.
+     * 
+ * + * string query = 1; + * @return The query. + */ + @java.lang.Override + public java.lang.String getQuery() { + java.lang.Object ref = query_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + query_ = s; + return s; + } + } + /** + *
+     * The SQL syntax.
+     * 
+ * + * string query = 1; + * @return The bytes for query. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getQueryBytes() { + java.lang.Object ref = query_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + query_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TRANSACTION_ID_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + @java.lang.Override + public boolean hasTransactionId() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, query_); + } + if (((bitField0_ & 0x00000001) != 0)) { + output.writeBytes(2, transactionId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, query_); + } + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, transactionId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery) obj; + + if (!getQuery() + .equals(other.getQuery())) return false; + if (hasTransactionId() != other.hasTransactionId()) return false; + if (hasTransactionId()) { + if (!getTransactionId() + .equals(other.getTransactionId())) 
return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + QUERY_FIELD_NUMBER; + hash = (53 * hash) + getQuery().hashCode(); + if (hasTransactionId()) { + hash = (37 * hash) + TRANSACTION_ID_FIELD_NUMBER; + hash = (53 * hash) + getTransactionId().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder 
newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a SQL query. Used in the command member of FlightDescriptor
+     * for the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *    Fields on this schema may contain the following metadata:
+     *    - ARROW:FLIGHT:SQL:CATALOG_NAME      - Table's catalog name
+     *    - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME    - Database schema name
+     *    - ARROW:FLIGHT:SQL:TABLE_NAME        - Table name
+     *    - ARROW:FLIGHT:SQL:TYPE_NAME         - The data source-specific name for the data type of the column.
+     *    - ARROW:FLIGHT:SQL:PRECISION         - Column precision/size
+     *    - ARROW:FLIGHT:SQL:SCALE             - Column scale/decimal digits if applicable
+     *    - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise.
+     *    - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise.
+     *    - ARROW:FLIGHT:SQL:IS_READ_ONLY      - "1" indicates if the column is read only, "0" otherwise.
+     *    - ARROW:FLIGHT:SQL:IS_SEARCHABLE     - "1" indicates if the column is searchable via WHERE clause, "0" otherwise.
+     *  - GetFlightInfo: execute the query.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandStatementQuery} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandStatementQuery) + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQueryOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementQuery_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementQuery_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + query_ = ""; + transactionId_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementQuery_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.query_ = query_; + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.transactionId_ = transactionId_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery.getDefaultInstance()) return this; + if (!other.getQuery().isEmpty()) { + query_ = other.query_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasTransactionId()) { + setTransactionId(other.getTransactionId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder 
mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + query_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + transactionId_ = input.readBytes(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object query_ = ""; + /** + *
+       * The SQL syntax.
+       * 
+ * + * string query = 1; + * @return The query. + */ + public java.lang.String getQuery() { + java.lang.Object ref = query_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + query_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * The SQL syntax.
+       * 
+ * + * string query = 1; + * @return The bytes for query. + */ + public com.google.protobuf.ByteString + getQueryBytes() { + java.lang.Object ref = query_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + query_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * The SQL syntax.
+       * 
+ * + * string query = 1; + * @param value The query to set. + * @return This builder for chaining. + */ + public Builder setQuery( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + query_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The SQL syntax.
+       * 
+ * + * string query = 1; + * @return This builder for chaining. + */ + public Builder clearQuery() { + query_ = getDefaultInstance().getQuery(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       * The SQL syntax.
+       * 
+ * + * string query = 1; + * @param value The bytes for query to set. + * @return This builder for chaining. + */ + public Builder setQueryBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + query_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + @java.lang.Override + public boolean hasTransactionId() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @param value The transactionId to set. + * @return This builder for chaining. + */ + public Builder setTransactionId(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + transactionId_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return This builder for chaining. + */ + public Builder clearTransactionId() { + bitField0_ = (bitField0_ & ~0x00000002); + transactionId_ = getDefaultInstance().getTransactionId(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandStatementQuery) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandStatementQuery) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandStatementQuery parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + 
.setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementQuery getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandStatementSubstraitPlanOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandStatementSubstraitPlan) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * A serialized substrait.Plan
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return Whether the plan field is set. + */ + boolean hasPlan(); + /** + *
+     * A serialized substrait.Plan
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return The plan. + */ + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan getPlan(); + /** + *
+     * A serialized substrait.Plan
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder getPlanOrBuilder(); + + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + boolean hasTransactionId(); + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + com.google.protobuf.ByteString getTransactionId(); + } + /** + *
+   *
+   * Represents a Substrait plan. Used in the command member of FlightDescriptor
+   * for the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *    Fields on this schema may contain the following metadata:
+   *    - ARROW:FLIGHT:SQL:CATALOG_NAME      - Table's catalog name
+   *    - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME    - Database schema name
+   *    - ARROW:FLIGHT:SQL:TABLE_NAME        - Table name
+   *    - ARROW:FLIGHT:SQL:TYPE_NAME         - The data source-specific name for the data type of the column.
+   *    - ARROW:FLIGHT:SQL:PRECISION         - Column precision/size
+   *    - ARROW:FLIGHT:SQL:SCALE             - Column scale/decimal digits if applicable
+   *    - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise.
+   *    - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise.
+   *    - ARROW:FLIGHT:SQL:IS_READ_ONLY      - "1" indicates if the column is read only, "0" otherwise.
+   *    - ARROW:FLIGHT:SQL:IS_SEARCHABLE     - "1" indicates if the column is searchable via WHERE clause, "0" otherwise.
+   *  - GetFlightInfo: execute the query.
+   *  - DoPut: execute the query.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandStatementSubstraitPlan} + */ + public static final class CommandStatementSubstraitPlan extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandStatementSubstraitPlan) + CommandStatementSubstraitPlanOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandStatementSubstraitPlan.newBuilder() to construct. + private CommandStatementSubstraitPlan(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandStatementSubstraitPlan() { + transactionId_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandStatementSubstraitPlan(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementSubstraitPlan_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementSubstraitPlan_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan.Builder.class); + } + + private int bitField0_; + public static final int PLAN_FIELD_NUMBER = 1; + private org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan plan_; + /** + *
+     * A serialized substrait.Plan
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return Whether the plan field is set. + */ + @java.lang.Override + public boolean hasPlan() { + return plan_ != null; + } + /** + *
+     * A serialized substrait.Plan
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return The plan. + */ + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan getPlan() { + return plan_ == null ? org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance() : plan_; + } + /** + *
+     * A serialized substrait.Plan
+     * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder getPlanOrBuilder() { + return plan_ == null ? org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance() : plan_; + } + + public static final int TRANSACTION_ID_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + @java.lang.Override + public boolean hasTransactionId() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (plan_ != null) { + output.writeMessage(1, getPlan()); + } + if (((bitField0_ & 0x00000001) != 0)) { + output.writeBytes(2, transactionId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (plan_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getPlan()); + } + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, transactionId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan) obj; + + if (hasPlan() != other.hasPlan()) return false; + if (hasPlan()) { + if (!getPlan() + .equals(other.getPlan())) return false; + } + if (hasTransactionId() != other.hasTransactionId()) return false; + if (hasTransactionId()) { + if (!getTransactionId() + .equals(other.getTransactionId())) return false; + } + if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasPlan()) { + hash = (37 * hash) + PLAN_FIELD_NUMBER; + hash = (53 * hash) + getPlan().hashCode(); + } + if (hasTransactionId()) { + hash = (37 * hash) + TRANSACTION_ID_FIELD_NUMBER; + hash = (53 * hash) + getTransactionId().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a Substrait plan. Used in the command member of FlightDescriptor
+     * for the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *    Fields on this schema may contain the following metadata:
+     *    - ARROW:FLIGHT:SQL:CATALOG_NAME      - Table's catalog name
+     *    - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME    - Database schema name
+     *    - ARROW:FLIGHT:SQL:TABLE_NAME        - Table name
+     *    - ARROW:FLIGHT:SQL:TYPE_NAME         - The data source-specific name for the data type of the column.
+     *    - ARROW:FLIGHT:SQL:PRECISION         - Column precision/size
+     *    - ARROW:FLIGHT:SQL:SCALE             - Column scale/decimal digits if applicable
+     *    - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise.
+     *    - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise.
+     *    - ARROW:FLIGHT:SQL:IS_READ_ONLY      - "1" indicates if the column is read only, "0" otherwise.
+     *    - ARROW:FLIGHT:SQL:IS_SEARCHABLE     - "1" indicates if the column is searchable via WHERE clause, "0" otherwise.
+     *  - GetFlightInfo: execute the query.
+     *  - DoPut: execute the query.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandStatementSubstraitPlan} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandStatementSubstraitPlan) + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlanOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementSubstraitPlan_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementSubstraitPlan_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + plan_ = null; + if (planBuilder_ != null) { + planBuilder_.dispose(); + planBuilder_ = null; + } + transactionId_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementSubstraitPlan_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan getDefaultInstanceForType() { + return 
org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.plan_ = planBuilder_ == null + ? 
plan_ + : planBuilder_.build(); + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.transactionId_ = transactionId_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan.getDefaultInstance()) return this; + if (other.hasPlan()) { + mergePlan(other.getPlan()); + } + if (other.hasTransactionId()) { + setTransactionId(other.getTransactionId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getPlanFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + transactionId_ = input.readBytes(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan plan_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder> planBuilder_; + /** + *
+       * A serialized substrait.Plan
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return Whether the plan field is set. + */ + public boolean hasPlan() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       * A serialized substrait.Plan
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + * @return The plan. + */ + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan getPlan() { + if (planBuilder_ == null) { + return plan_ == null ? org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance() : plan_; + } else { + return planBuilder_.getMessage(); + } + } + /** + *
+       * A serialized substrait.Plan
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public Builder setPlan(org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan value) { + if (planBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + plan_ = value; + } else { + planBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * A serialized substrait.Plan
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public Builder setPlan( + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder builderForValue) { + if (planBuilder_ == null) { + plan_ = builderForValue.build(); + } else { + planBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * A serialized substrait.Plan
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public Builder mergePlan(org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan value) { + if (planBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + plan_ != null && + plan_ != org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance()) { + getPlanBuilder().mergeFrom(value); + } else { + plan_ = value; + } + } else { + planBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * A serialized substrait.Plan
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public Builder clearPlan() { + bitField0_ = (bitField0_ & ~0x00000001); + plan_ = null; + if (planBuilder_ != null) { + planBuilder_.dispose(); + planBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       * A serialized substrait.Plan
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder getPlanBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getPlanFieldBuilder().getBuilder(); + } + /** + *
+       * A serialized substrait.Plan
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + public org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder getPlanOrBuilder() { + if (planBuilder_ != null) { + return planBuilder_.getMessageOrBuilder(); + } else { + return plan_ == null ? + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.getDefaultInstance() : plan_; + } + } + /** + *
+       * A serialized substrait.Plan
+       * 
+ * + * .arrow.flight.protocol.sql.SubstraitPlan plan = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder> + getPlanFieldBuilder() { + if (planBuilder_ == null) { + planBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlan.Builder, org.apache.arrow.flight.sql.impl.FlightSql.SubstraitPlanOrBuilder>( + getPlan(), + getParentForChildren(), + isClean()); + plan_ = null; + } + return planBuilder_; + } + + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + @java.lang.Override + public boolean hasTransactionId() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @param value The transactionId to set. + * @return This builder for chaining. + */ + public Builder setTransactionId(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + transactionId_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return This builder for chaining. + */ + public Builder clearTransactionId() { + bitField0_ = (bitField0_ & ~0x00000002); + transactionId_ = getDefaultInstance().getTransactionId(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandStatementSubstraitPlan) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandStatementSubstraitPlan) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandStatementSubstraitPlan parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementSubstraitPlan getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface TicketStatementQueryOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.TicketStatementQuery) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Unique identifier for the instance of the statement to execute.
+     * 
+ * + * bytes statement_handle = 1; + * @return The statementHandle. + */ + com.google.protobuf.ByteString getStatementHandle(); + } + /** + *
+   **
+   * Represents a ticket resulting from GetFlightInfo with a CommandStatementQuery.
+   * This should be used only once and treated as an opaque value, that is, clients should not attempt to parse this.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.TicketStatementQuery} + */ + public static final class TicketStatementQuery extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.TicketStatementQuery) + TicketStatementQueryOrBuilder { + private static final long serialVersionUID = 0L; + // Use TicketStatementQuery.newBuilder() to construct. + private TicketStatementQuery(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private TicketStatementQuery() { + statementHandle_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new TicketStatementQuery(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_TicketStatementQuery_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_TicketStatementQuery_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery.class, org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery.Builder.class); + } + + public static final int STATEMENT_HANDLE_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString statementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Unique identifier for the instance of the statement to execute.
+     * 
+ * + * bytes statement_handle = 1; + * @return The statementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getStatementHandle() { + return statementHandle_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!statementHandle_.isEmpty()) { + output.writeBytes(1, statementHandle_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!statementHandle_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, statementHandle_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery other = (org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery) obj; + + if (!getStatementHandle() + .equals(other.getStatementHandle())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + STATEMENT_HANDLE_FIELD_NUMBER; + hash = (53 * hash) + getStatementHandle().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode 
= hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
+ throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     **
+     * Represents a ticket resulting from GetFlightInfo with a CommandStatementQuery.
+     * This should be used only once and treated as an opaque value, that is, clients should not attempt to parse this.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.TicketStatementQuery} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.TicketStatementQuery) + org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQueryOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_TicketStatementQuery_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_TicketStatementQuery_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery.class, org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + statementHandle_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_TicketStatementQuery_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery build() { 
+ org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery result = new org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.statementHandle_ = statementHandle_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery.getDefaultInstance()) return this; + if (other.getStatementHandle() != com.google.protobuf.ByteString.EMPTY) { + setStatementHandle(other.getStatementHandle()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + 
switch (tag) { + case 0: + done = true; + break; + case 10: { + statementHandle_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString statementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Unique identifier for the instance of the statement to execute.
+       * 
+ * + * bytes statement_handle = 1; + * @return The statementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getStatementHandle() { + return statementHandle_; + } + /** + *
+       * Unique identifier for the instance of the statement to execute.
+       * 
+ * + * bytes statement_handle = 1; + * @param value The statementHandle to set. + * @return This builder for chaining. + */ + public Builder setStatementHandle(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + statementHandle_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * Unique identifier for the instance of the statement to execute.
+       * 
+ * + * bytes statement_handle = 1; + * @return This builder for chaining. + */ + public Builder clearStatementHandle() { + bitField0_ = (bitField0_ & ~0x00000001); + statementHandle_ = getDefaultInstance().getStatementHandle(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.TicketStatementQuery) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.TicketStatementQuery) + private static final org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public TicketStatementQuery parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + 
.setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.TicketStatementQuery getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandPreparedStatementQueryOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandPreparedStatementQuery) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Opaque handle for the prepared statement on the server.
+     * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + com.google.protobuf.ByteString getPreparedStatementHandle(); + } + /** + *
+   *
+   * Represents an instance of executing a prepared statement. Used in the command member of FlightDescriptor for
+   * the following RPC calls:
+   *  - GetSchema: return the Arrow schema of the query.
+   *    Fields on this schema may contain the following metadata:
+   *    - ARROW:FLIGHT:SQL:CATALOG_NAME      - Table's catalog name
+   *    - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME    - Database schema name
+   *    - ARROW:FLIGHT:SQL:TABLE_NAME        - Table name
+   *    - ARROW:FLIGHT:SQL:TYPE_NAME         - The data source-specific name for the data type of the column.
+   *    - ARROW:FLIGHT:SQL:PRECISION         - Column precision/size
+   *    - ARROW:FLIGHT:SQL:SCALE             - Column scale/decimal digits if applicable
+   *    - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise.
+   *    - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise.
+   *    - ARROW:FLIGHT:SQL:IS_READ_ONLY      - "1" indicates if the column is read only, "0" otherwise.
+   *    - ARROW:FLIGHT:SQL:IS_SEARCHABLE     - "1" indicates if the column is searchable via WHERE clause, "0" otherwise.
+   *
+   *    If the schema is retrieved after parameter values have been bound with DoPut, then the server should account
+   *    for the parameters when determining the schema.
+   *  - DoPut: bind parameter values. All of the bound parameter sets will be executed as a single atomic execution.
+   *  - GetFlightInfo: execute the prepared statement instance.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandPreparedStatementQuery} + */ + public static final class CommandPreparedStatementQuery extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandPreparedStatementQuery) + CommandPreparedStatementQueryOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandPreparedStatementQuery.newBuilder() to construct. + private CommandPreparedStatementQuery(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandPreparedStatementQuery() { + preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandPreparedStatementQuery(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandPreparedStatementQuery_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandPreparedStatementQuery_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery.Builder.class); + } + + public static final int PREPARED_STATEMENT_HANDLE_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Opaque handle for the prepared statement on the server.
+     * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPreparedStatementHandle() { + return preparedStatementHandle_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!preparedStatementHandle_.isEmpty()) { + output.writeBytes(1, preparedStatementHandle_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!preparedStatementHandle_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, preparedStatementHandle_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery) obj; + + if (!getPreparedStatementHandle() + .equals(other.getPreparedStatementHandle())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PREPARED_STATEMENT_HANDLE_FIELD_NUMBER; + hash = 
(53 * hash) + getPreparedStatementHandle().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, 
input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + 
return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents an instance of executing a prepared statement. Used in the command member of FlightDescriptor for
+     * the following RPC calls:
+     *  - GetSchema: return the Arrow schema of the query.
+     *    Fields on this schema may contain the following metadata:
+     *    - ARROW:FLIGHT:SQL:CATALOG_NAME      - Table's catalog name
+     *    - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME    - Database schema name
+     *    - ARROW:FLIGHT:SQL:TABLE_NAME        - Table name
+     *    - ARROW:FLIGHT:SQL:TYPE_NAME         - The data source-specific name for the data type of the column.
+     *    - ARROW:FLIGHT:SQL:PRECISION         - Column precision/size
+     *    - ARROW:FLIGHT:SQL:SCALE             - Column scale/decimal digits if applicable
+     *    - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise.
+     *    - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise.
+     *    - ARROW:FLIGHT:SQL:IS_READ_ONLY      - "1" indicates if the column is read only, "0" otherwise.
+     *    - ARROW:FLIGHT:SQL:IS_SEARCHABLE     - "1" indicates if the column is searchable via WHERE clause, "0" otherwise.
+     *
+     *    If the schema is retrieved after parameter values have been bound with DoPut, then the server should account
+     *    for the parameters when determining the schema.
+     *  - DoPut: bind parameter values. All of the bound parameter sets will be executed as a single atomic execution.
+     *  - GetFlightInfo: execute the prepared statement instance.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandPreparedStatementQuery} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandPreparedStatementQuery) + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQueryOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandPreparedStatementQuery_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandPreparedStatementQuery_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandPreparedStatementQuery_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery.getDefaultInstance(); + } 
+ + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.preparedStatementHandle_ = preparedStatementHandle_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery.getDefaultInstance()) return this; + if (other.getPreparedStatementHandle() != com.google.protobuf.ByteString.EMPTY) { + setPreparedStatementHandle(other.getPreparedStatementHandle()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + preparedStatementHandle_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPreparedStatementHandle() { + return preparedStatementHandle_; + } + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @param value The preparedStatementHandle to set. + * @return This builder for chaining. + */ + public Builder setPreparedStatementHandle(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + preparedStatementHandle_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @return This builder for chaining. + */ + public Builder clearPreparedStatementHandle() { + bitField0_ = (bitField0_ & ~0x00000001); + preparedStatementHandle_ = getDefaultInstance().getPreparedStatementHandle(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandPreparedStatementQuery) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandPreparedStatementQuery) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandPreparedStatementQuery parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { 
+ throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementQuery getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandStatementUpdateOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandStatementUpdate) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The SQL syntax.
+     * 
+ * + * string query = 1; + * @return The query. + */ + java.lang.String getQuery(); + /** + *
+     * The SQL syntax.
+     * 
+ * + * string query = 1; + * @return The bytes for query. + */ + com.google.protobuf.ByteString + getQueryBytes(); + + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + boolean hasTransactionId(); + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + com.google.protobuf.ByteString getTransactionId(); + } + /** + *
+   *
+   * Represents a SQL update query. Used in the command member of FlightDescriptor
+   * for the RPC call DoPut to cause the server to execute the included SQL update.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandStatementUpdate} + */ + public static final class CommandStatementUpdate extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandStatementUpdate) + CommandStatementUpdateOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandStatementUpdate.newBuilder() to construct. + private CommandStatementUpdate(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandStatementUpdate() { + query_ = ""; + transactionId_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandStatementUpdate(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementUpdate_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementUpdate_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate.Builder.class); + } + + private int bitField0_; + public static final int QUERY_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object query_ = ""; + /** + *
+     * The SQL syntax.
+     * 
+ * + * string query = 1; + * @return The query. + */ + @java.lang.Override + public java.lang.String getQuery() { + java.lang.Object ref = query_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + query_ = s; + return s; + } + } + /** + *
+     * The SQL syntax.
+     * 
+ * + * string query = 1; + * @return The bytes for query. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getQueryBytes() { + java.lang.Object ref = query_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + query_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TRANSACTION_ID_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + @java.lang.Override + public boolean hasTransactionId() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     * Include the query as part of this transaction (if unset, the query is auto-committed).
+     * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, query_); + } + if (((bitField0_ & 0x00000001) != 0)) { + output.writeBytes(2, transactionId_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, query_); + } + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, transactionId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate) obj; + + if (!getQuery() + .equals(other.getQuery())) return false; + if (hasTransactionId() != other.hasTransactionId()) return false; + if (hasTransactionId()) { + if (!getTransactionId() + .equals(other.getTransactionId())) 
return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + QUERY_FIELD_NUMBER; + hash = (53 * hash) + getQuery().hashCode(); + if (hasTransactionId()) { + hash = (37 * hash) + TRANSACTION_ID_FIELD_NUMBER; + hash = (53 * hash) + getTransactionId().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public 
Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a SQL update query. Used in the command member of FlightDescriptor
+     * for the RPC call DoPut to cause the server to execute the included SQL update.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandStatementUpdate} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandStatementUpdate) + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdateOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementUpdate_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementUpdate_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + query_ = ""; + transactionId_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandStatementUpdate_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.query_ = query_; + } + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000002) != 0)) { + result.transactionId_ = transactionId_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate.getDefaultInstance()) return this; + if (!other.getQuery().isEmpty()) { + query_ = other.query_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasTransactionId()) { + setTransactionId(other.getTransactionId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public 
Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + query_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + transactionId_ = input.readBytes(); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object query_ = ""; + /** + *
+       * The SQL syntax.
+       * 
+ * + * string query = 1; + * @return The query. + */ + public java.lang.String getQuery() { + java.lang.Object ref = query_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + query_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * The SQL syntax.
+       * 
+ * + * string query = 1; + * @return The bytes for query. + */ + public com.google.protobuf.ByteString + getQueryBytes() { + java.lang.Object ref = query_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + query_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * The SQL syntax.
+       * 
+ * + * string query = 1; + * @param value The query to set. + * @return This builder for chaining. + */ + public Builder setQuery( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + query_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The SQL syntax.
+       * 
+ * + * string query = 1; + * @return This builder for chaining. + */ + public Builder clearQuery() { + query_ = getDefaultInstance().getQuery(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       * The SQL syntax.
+       * 
+ * + * string query = 1; + * @param value The bytes for query to set. + * @return This builder for chaining. + */ + public Builder setQueryBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + query_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString transactionId_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return Whether the transactionId field is set. + */ + @java.lang.Override + public boolean hasTransactionId() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return The transactionId. + */ + @java.lang.Override + public com.google.protobuf.ByteString getTransactionId() { + return transactionId_; + } + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @param value The transactionId to set. + * @return This builder for chaining. + */ + public Builder setTransactionId(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + transactionId_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * Include the query as part of this transaction (if unset, the query is auto-committed).
+       * 
+ * + * optional bytes transaction_id = 2; + * @return This builder for chaining. + */ + public Builder clearTransactionId() { + bitField0_ = (bitField0_ & ~0x00000002); + transactionId_ = getDefaultInstance().getTransactionId(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandStatementUpdate) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandStatementUpdate) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandStatementUpdate parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + 
.setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandStatementUpdate getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface CommandPreparedStatementUpdateOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.CommandPreparedStatementUpdate) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Opaque handle for the prepared statement on the server.
+     * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + com.google.protobuf.ByteString getPreparedStatementHandle(); + } + /** + *
+   *
+   * Represents a SQL update query. Used in the command member of FlightDescriptor
+   * for the RPC call DoPut to cause the server to execute the included
+   * prepared statement handle as an update.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandPreparedStatementUpdate} + */ + public static final class CommandPreparedStatementUpdate extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.CommandPreparedStatementUpdate) + CommandPreparedStatementUpdateOrBuilder { + private static final long serialVersionUID = 0L; + // Use CommandPreparedStatementUpdate.newBuilder() to construct. + private CommandPreparedStatementUpdate(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private CommandPreparedStatementUpdate() { + preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new CommandPreparedStatementUpdate(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandPreparedStatementUpdate_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandPreparedStatementUpdate_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate.Builder.class); + } + + public static final int PREPARED_STATEMENT_HANDLE_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Opaque handle for the prepared statement on the server.
+     * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPreparedStatementHandle() { + return preparedStatementHandle_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!preparedStatementHandle_.isEmpty()) { + output.writeBytes(1, preparedStatementHandle_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!preparedStatementHandle_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, preparedStatementHandle_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate other = (org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate) obj; + + if (!getPreparedStatementHandle() + .equals(other.getPreparedStatementHandle())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + PREPARED_STATEMENT_HANDLE_FIELD_NUMBER; + hash = 
(53 * hash) + getPreparedStatementHandle().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override 
+ public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Represents a SQL update query. Used in the command member of FlightDescriptor
+     * for the RPC call DoPut to cause the server to execute the included
+     * prepared statement handle as an update.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.CommandPreparedStatementUpdate} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.CommandPreparedStatementUpdate) + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdateOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandPreparedStatementUpdate_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandPreparedStatementUpdate_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate.class, org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_CommandPreparedStatementUpdate_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate getDefaultInstanceForType() { + return 
org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate build() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate result = new org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.preparedStatementHandle_ = preparedStatementHandle_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate.getDefaultInstance()) return this; + if (other.getPreparedStatementHandle() != com.google.protobuf.ByteString.EMPTY) { + setPreparedStatementHandle(other.getPreparedStatementHandle()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + 
@java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + preparedStatementHandle_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPreparedStatementHandle() { + return preparedStatementHandle_; + } + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @param value The preparedStatementHandle to set. + * @return This builder for chaining. + */ + public Builder setPreparedStatementHandle(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + preparedStatementHandle_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * Opaque handle for the prepared statement on the server.
+       * 
+ * + * bytes prepared_statement_handle = 1; + * @return This builder for chaining. + */ + public Builder clearPreparedStatementHandle() { + bitField0_ = (bitField0_ & ~0x00000001); + preparedStatementHandle_ = getDefaultInstance().getPreparedStatementHandle(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.CommandPreparedStatementUpdate) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.CommandPreparedStatementUpdate) + private static final org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public CommandPreparedStatementUpdate parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch 
(java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.CommandPreparedStatementUpdate getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface DoPutUpdateResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.DoPutUpdateResult) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The number of records updated. A return value of -1 represents
+     * an unknown updated record count.
+     * 
+ * + * int64 record_count = 1; + * @return The recordCount. + */ + long getRecordCount(); + } + /** + *
+   *
+   * Returned from the RPC call DoPut when a CommandStatementUpdate
+   * CommandPreparedStatementUpdate was in the request, containing
+   * results from the update.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.DoPutUpdateResult} + */ + public static final class DoPutUpdateResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.DoPutUpdateResult) + DoPutUpdateResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use DoPutUpdateResult.newBuilder() to construct. + private DoPutUpdateResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DoPutUpdateResult() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new DoPutUpdateResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_DoPutUpdateResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_DoPutUpdateResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult.class, org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult.Builder.class); + } + + public static final int RECORD_COUNT_FIELD_NUMBER = 1; + private long recordCount_ = 0L; + /** + *
+     * The number of records updated. A return value of -1 represents
+     * an unknown updated record count.
+     * 
+ * + * int64 record_count = 1; + * @return The recordCount. + */ + @java.lang.Override + public long getRecordCount() { + return recordCount_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (recordCount_ != 0L) { + output.writeInt64(1, recordCount_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (recordCount_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, recordCount_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult other = (org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult) obj; + + if (getRecordCount() + != other.getRecordCount()) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + RECORD_COUNT_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getRecordCount()); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Returned from the RPC call DoPut when a CommandStatementUpdate
+     * CommandPreparedStatementUpdate was in the request, containing
+     * results from the update.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.DoPutUpdateResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.DoPutUpdateResult) + org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_DoPutUpdateResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_DoPutUpdateResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult.class, org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + recordCount_ = 0L; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_DoPutUpdateResult_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult build() { + org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult result = 
buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult result = new org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.recordCount_ = recordCount_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult.getDefaultInstance()) return this; + if (other.getRecordCount() != 0L) { + setRecordCount(other.getRecordCount()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + recordCount_ = input.readInt64(); + bitField0_ |= 0x00000001; + break; + } // case 8 + default: 
{ + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private long recordCount_ ; + /** + *
+       * The number of records updated. A return value of -1 represents
+       * an unknown updated record count.
+       * 
+ * + * int64 record_count = 1; + * @return The recordCount. + */ + @java.lang.Override + public long getRecordCount() { + return recordCount_; + } + /** + *
+       * The number of records updated. A return value of -1 represents
+       * an unknown updated record count.
+       * 
+ * + * int64 record_count = 1; + * @param value The recordCount to set. + * @return This builder for chaining. + */ + public Builder setRecordCount(long value) { + + recordCount_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The number of records updated. A return value of -1 represents
+       * an unknown updated record count.
+       * 
+ * + * int64 record_count = 1; + * @return This builder for chaining. + */ + public Builder clearRecordCount() { + bitField0_ = (bitField0_ & ~0x00000001); + recordCount_ = 0L; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.DoPutUpdateResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.DoPutUpdateResult) + private static final org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DoPutUpdateResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } 
+ }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.DoPutUpdateResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface DoPutPreparedStatementResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.DoPutPreparedStatementResult) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Represents a (potentially updated) opaque handle for the prepared statement on the server.
+     * Because the handle could potentially be updated, any previous handles for this prepared
+     * statement should be considered invalid, and all subsequent requests for this prepared
+     * statement must use this new handle.
+     * The updated handle allows implementing query parameters with stateless services.
+     * 
+     * When an updated handle is not provided by the server, clients should contiue
+     * using the previous handle provided by `ActionCreatePreparedStatementResonse`.
+     * 
+ * + * optional bytes prepared_statement_handle = 1; + * @return Whether the preparedStatementHandle field is set. + */ + boolean hasPreparedStatementHandle(); + /** + *
+     * Represents a (potentially updated) opaque handle for the prepared statement on the server.
+     * Because the handle could potentially be updated, any previous handles for this prepared
+     * statement should be considered invalid, and all subsequent requests for this prepared
+     * statement must use this new handle.
+     * The updated handle allows implementing query parameters with stateless services.
+     * 
+     * When an updated handle is not provided by the server, clients should contiue
+     * using the previous handle provided by `ActionCreatePreparedStatementResonse`.
+     * 
+ * + * optional bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + com.google.protobuf.ByteString getPreparedStatementHandle(); + } + /** + *
+   * An *optional* response returned when `DoPut` is called with `CommandPreparedStatementQuery`.
+   *
+   * *Note on legacy behavior*: previous versions of the protocol did not return any result for
+   * this command, and that behavior should still be supported by clients. In that case, the client
+   * can continue as though the fields in this message were not provided or set to sensible default values.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.DoPutPreparedStatementResult} + */ + public static final class DoPutPreparedStatementResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.DoPutPreparedStatementResult) + DoPutPreparedStatementResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use DoPutPreparedStatementResult.newBuilder() to construct. + private DoPutPreparedStatementResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DoPutPreparedStatementResult() { + preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new DoPutPreparedStatementResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_DoPutPreparedStatementResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_DoPutPreparedStatementResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult.class, org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult.Builder.class); + } + + private int bitField0_; + public static final int PREPARED_STATEMENT_HANDLE_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * Represents a (potentially updated) opaque handle for the prepared statement on the server.
+     * Because the handle could potentially be updated, any previous handles for this prepared
+     * statement should be considered invalid, and all subsequent requests for this prepared
+     * statement must use this new handle.
+     * The updated handle allows implementing query parameters with stateless services.
+     * 
+     * When an updated handle is not provided by the server, clients should contiue
+     * using the previous handle provided by `ActionCreatePreparedStatementResonse`.
+     * 
+ * + * optional bytes prepared_statement_handle = 1; + * @return Whether the preparedStatementHandle field is set. + */ + @java.lang.Override + public boolean hasPreparedStatementHandle() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     * Represents a (potentially updated) opaque handle for the prepared statement on the server.
+     * Because the handle could potentially be updated, any previous handles for this prepared
+     * statement should be considered invalid, and all subsequent requests for this prepared
+     * statement must use this new handle.
+     * The updated handle allows implementing query parameters with stateless services.
+     * 
+     * When an updated handle is not provided by the server, clients should contiue
+     * using the previous handle provided by `ActionCreatePreparedStatementResonse`.
+     * 
+ * + * optional bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPreparedStatementHandle() { + return preparedStatementHandle_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeBytes(1, preparedStatementHandle_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, preparedStatementHandle_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult other = (org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult) obj; + + if (hasPreparedStatementHandle() != other.hasPreparedStatementHandle()) return false; + if (hasPreparedStatementHandle()) { + if (!getPreparedStatementHandle() + .equals(other.getPreparedStatementHandle())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 
41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasPreparedStatementHandle()) { + hash = (37 * hash) + PREPARED_STATEMENT_HANDLE_FIELD_NUMBER; + hash = (53 * hash) + getPreparedStatementHandle().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder 
newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * An *optional* response returned when `DoPut` is called with `CommandPreparedStatementQuery`.
+     *
+     * *Note on legacy behavior*: previous versions of the protocol did not return any result for
+     * this command, and that behavior should still be supported by clients. In that case, the client
+     * can continue as though the fields in this message were not provided or set to sensible default values.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.DoPutPreparedStatementResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.DoPutPreparedStatementResult) + org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_DoPutPreparedStatementResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_DoPutPreparedStatementResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult.class, org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_DoPutPreparedStatementResult_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult.getDefaultInstance(); + } + + 
@java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult build() { + org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult result = new org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.preparedStatementHandle_ = preparedStatementHandle_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult.getDefaultInstance()) return this; + if (other.hasPreparedStatementHandle()) { + setPreparedStatementHandle(other.getPreparedStatementHandle()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + preparedStatementHandle_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString preparedStatementHandle_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Represents a (potentially updated) opaque handle for the prepared statement on the server.
+       * Because the handle could potentially be updated, any previous handles for this prepared
+       * statement should be considered invalid, and all subsequent requests for this prepared
+       * statement must use this new handle.
+       * The updated handle allows implementing query parameters with stateless services.
+       * 
+       * When an updated handle is not provided by the server, clients should contiue
+       * using the previous handle provided by `ActionCreatePreparedStatementResonse`.
+       * 
+ * + * optional bytes prepared_statement_handle = 1; + * @return Whether the preparedStatementHandle field is set. + */ + @java.lang.Override + public boolean hasPreparedStatementHandle() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       * Represents a (potentially updated) opaque handle for the prepared statement on the server.
+       * Because the handle could potentially be updated, any previous handles for this prepared
+       * statement should be considered invalid, and all subsequent requests for this prepared
+       * statement must use this new handle.
+       * The updated handle allows implementing query parameters with stateless services.
+       * 
+       * When an updated handle is not provided by the server, clients should contiue
+       * using the previous handle provided by `ActionCreatePreparedStatementResonse`.
+       * 
+ * + * optional bytes prepared_statement_handle = 1; + * @return The preparedStatementHandle. + */ + @java.lang.Override + public com.google.protobuf.ByteString getPreparedStatementHandle() { + return preparedStatementHandle_; + } + /** + *
+       * Represents a (potentially updated) opaque handle for the prepared statement on the server.
+       * Because the handle could potentially be updated, any previous handles for this prepared
+       * statement should be considered invalid, and all subsequent requests for this prepared
+       * statement must use this new handle.
+       * The updated handle allows implementing query parameters with stateless services.
+       * 
+       * When an updated handle is not provided by the server, clients should contiue
+       * using the previous handle provided by `ActionCreatePreparedStatementResonse`.
+       * 
+ * + * optional bytes prepared_statement_handle = 1; + * @param value The preparedStatementHandle to set. + * @return This builder for chaining. + */ + public Builder setPreparedStatementHandle(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + preparedStatementHandle_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * Represents a (potentially updated) opaque handle for the prepared statement on the server.
+       * Because the handle could potentially be updated, any previous handles for this prepared
+       * statement should be considered invalid, and all subsequent requests for this prepared
+       * statement must use this new handle.
+       * The updated handle allows implementing query parameters with stateless services.
+       * 
+       * When an updated handle is not provided by the server, clients should contiue
+       * using the previous handle provided by `ActionCreatePreparedStatementResonse`.
+       * 
+ * + * optional bytes prepared_statement_handle = 1; + * @return This builder for chaining. + */ + public Builder clearPreparedStatementHandle() { + bitField0_ = (bitField0_ & ~0x00000001); + preparedStatementHandle_ = getDefaultInstance().getPreparedStatementHandle(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.DoPutPreparedStatementResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.DoPutPreparedStatementResult) + private static final org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DoPutPreparedStatementResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException 
e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.DoPutPreparedStatementResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + @java.lang.Deprecated public interface ActionCancelQueryRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionCancelQueryRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The result of the GetFlightInfo RPC that initiated the query.
+     * XXX(ARROW-16902): this must be a serialized FlightInfo, but is
+     * rendered as bytes because Protobuf does not really support one
+     * DLL using Protobuf definitions from another DLL.
+     * 
+ * + * bytes info = 1; + * @return The info. + */ + com.google.protobuf.ByteString getInfo(); + } + /** + *
+   *
+   * Request message for the "CancelQuery" action.
+   *
+   * Explicitly cancel a running query.
+   *
+   * This lets a single client explicitly cancel work, no matter how many clients
+   * are involved/whether the query is distributed or not, given server support.
+   * The transaction/statement is not rolled back; it is the application's job to
+   * commit or rollback as appropriate. This only indicates the client no longer
+   * wishes to read the remainder of the query results or continue submitting
+   * data.
+   *
+   * This command is idempotent.
+   *
+   * This command is deprecated since 13.0.0. Use the "CancelFlightInfo"
+   * action with DoAction instead.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionCancelQueryRequest} + */ + @java.lang.Deprecated public static final class ActionCancelQueryRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionCancelQueryRequest) + ActionCancelQueryRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionCancelQueryRequest.newBuilder() to construct. + private ActionCancelQueryRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionCancelQueryRequest() { + info_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionCancelQueryRequest(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCancelQueryRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCancelQueryRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest.Builder.class); + } + + public static final int INFO_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString info_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+     * The result of the GetFlightInfo RPC that initiated the query.
+     * XXX(ARROW-16902): this must be a serialized FlightInfo, but is
+     * rendered as bytes because Protobuf does not really support one
+     * DLL using Protobuf definitions from another DLL.
+     * 
+ * + * bytes info = 1; + * @return The info. + */ + @java.lang.Override + public com.google.protobuf.ByteString getInfo() { + return info_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!info_.isEmpty()) { + output.writeBytes(1, info_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!info_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, info_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest) obj; + + if (!getInfo() + .equals(other.getInfo())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + INFO_FIELD_NUMBER; + hash = (53 * hash) + getInfo().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest 
parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Request message for the "CancelQuery" action.
+     *
+     * Explicitly cancel a running query.
+     *
+     * This lets a single client explicitly cancel work, no matter how many clients
+     * are involved/whether the query is distributed or not, given server support.
+     * The transaction/statement is not rolled back; it is the application's job to
+     * commit or rollback as appropriate. This only indicates the client no longer
+     * wishes to read the remainder of the query results or continue submitting
+     * data.
+     *
+     * This command is idempotent.
+     *
+     * This command is deprecated since 13.0.0. Use the "CancelFlightInfo"
+     * action with DoAction instead.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionCancelQueryRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionCancelQueryRequest) + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCancelQueryRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCancelQueryRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + info_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCancelQueryRequest_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest build() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.info_ = info_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest.getDefaultInstance()) return this; + if (other.getInfo() != com.google.protobuf.ByteString.EMPTY) { + setInfo(other.getInfo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = 
false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + info_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString info_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * The result of the GetFlightInfo RPC that initiated the query.
+       * XXX(ARROW-16902): this must be a serialized FlightInfo, but is
+       * rendered as bytes because Protobuf does not really support one
+       * DLL using Protobuf definitions from another DLL.
+       * 
+ * + * bytes info = 1; + * @return The info. + */ + @java.lang.Override + public com.google.protobuf.ByteString getInfo() { + return info_; + } + /** + *
+       * The result of the GetFlightInfo RPC that initiated the query.
+       * XXX(ARROW-16902): this must be a serialized FlightInfo, but is
+       * rendered as bytes because Protobuf does not really support one
+       * DLL using Protobuf definitions from another DLL.
+       * 
+ * + * bytes info = 1; + * @param value The info to set. + * @return This builder for chaining. + */ + public Builder setInfo(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + info_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * The result of the GetFlightInfo RPC that initiated the query.
+       * XXX(ARROW-16902): this must be a serialized FlightInfo, but is
+       * rendered as bytes because Protobuf does not really support one
+       * DLL using Protobuf definitions from another DLL.
+       * 
+ * + * bytes info = 1; + * @return This builder for chaining. + */ + public Builder clearInfo() { + bitField0_ = (bitField0_ & ~0x00000001); + info_ = getDefaultInstance().getInfo(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionCancelQueryRequest) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionCancelQueryRequest) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionCancelQueryRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + 
.setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + @java.lang.Deprecated public interface ActionCancelQueryResultOrBuilder extends + // @@protoc_insertion_point(interface_extends:arrow.flight.protocol.sql.ActionCancelQueryResult) + com.google.protobuf.MessageOrBuilder { + + /** + * .arrow.flight.protocol.sql.ActionCancelQueryResult.CancelResult result = 1; + * @return The enum numeric value on the wire for result. + */ + int getResultValue(); + /** + * .arrow.flight.protocol.sql.ActionCancelQueryResult.CancelResult result = 1; + * @return The result. + */ + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult getResult(); + } + /** + *
+   *
+   * The result of cancelling a query.
+   *
+   * The result should be wrapped in a google.protobuf.Any message.
+   *
+   * This command is deprecated since 13.0.0. Use the "CancelFlightInfo"
+   * action with DoAction instead.
+   * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionCancelQueryResult} + */ + @java.lang.Deprecated public static final class ActionCancelQueryResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:arrow.flight.protocol.sql.ActionCancelQueryResult) + ActionCancelQueryResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use ActionCancelQueryResult.newBuilder() to construct. + private ActionCancelQueryResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ActionCancelQueryResult() { + result_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ActionCancelQueryResult(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCancelQueryResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCancelQueryResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.Builder.class); + } + + /** + * Protobuf enum {@code arrow.flight.protocol.sql.ActionCancelQueryResult.CancelResult} + */ + public enum CancelResult + implements com.google.protobuf.ProtocolMessageEnum { + /** + *
+       * The cancellation status is unknown. Servers should avoid using
+       * this value (send a NOT_FOUND error if the requested query is
+       * not known). Clients can retry the request.
+       * 
+ * + * CANCEL_RESULT_UNSPECIFIED = 0; + */ + CANCEL_RESULT_UNSPECIFIED(0), + /** + *
+       * The cancellation request is complete. Subsequent requests with
+       * the same payload may return CANCELLED or a NOT_FOUND error.
+       * 
+ * + * CANCEL_RESULT_CANCELLED = 1; + */ + CANCEL_RESULT_CANCELLED(1), + /** + *
+       * The cancellation request is in progress. The client may retry
+       * the cancellation request.
+       * 
+ * + * CANCEL_RESULT_CANCELLING = 2; + */ + CANCEL_RESULT_CANCELLING(2), + /** + *
+       * The query is not cancellable. The client should not retry the
+       * cancellation request.
+       * 
+ * + * CANCEL_RESULT_NOT_CANCELLABLE = 3; + */ + CANCEL_RESULT_NOT_CANCELLABLE(3), + UNRECOGNIZED(-1), + ; + + /** + *
+       * The cancellation status is unknown. Servers should avoid using
+       * this value (send a NOT_FOUND error if the requested query is
+       * not known). Clients can retry the request.
+       * 
+ * + * CANCEL_RESULT_UNSPECIFIED = 0; + */ + public static final int CANCEL_RESULT_UNSPECIFIED_VALUE = 0; + /** + *
+       * The cancellation request is complete. Subsequent requests with
+       * the same payload may return CANCELLED or a NOT_FOUND error.
+       * 
+ * + * CANCEL_RESULT_CANCELLED = 1; + */ + public static final int CANCEL_RESULT_CANCELLED_VALUE = 1; + /** + *
+       * The cancellation request is in progress. The client may retry
+       * the cancellation request.
+       * 
+ * + * CANCEL_RESULT_CANCELLING = 2; + */ + public static final int CANCEL_RESULT_CANCELLING_VALUE = 2; + /** + *
+       * The query is not cancellable. The client should not retry the
+       * cancellation request.
+       * 
+ * + * CANCEL_RESULT_NOT_CANCELLABLE = 3; + */ + public static final int CANCEL_RESULT_NOT_CANCELLABLE_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static CancelResult valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static CancelResult forNumber(int value) { + switch (value) { + case 0: return CANCEL_RESULT_UNSPECIFIED; + case 1: return CANCEL_RESULT_CANCELLED; + case 2: return CANCEL_RESULT_CANCELLING; + case 3: return CANCEL_RESULT_NOT_CANCELLABLE; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + CancelResult> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public CancelResult findValueByNumber(int number) { + return CancelResult.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return 
org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.getDescriptor().getEnumTypes().get(0); + } + + private static final CancelResult[] VALUES = values(); + + public static CancelResult valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private CancelResult(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:arrow.flight.protocol.sql.ActionCancelQueryResult.CancelResult) + } + + public static final int RESULT_FIELD_NUMBER = 1; + private int result_ = 0; + /** + * .arrow.flight.protocol.sql.ActionCancelQueryResult.CancelResult result = 1; + * @return The enum numeric value on the wire for result. + */ + @java.lang.Override public int getResultValue() { + return result_; + } + /** + * .arrow.flight.protocol.sql.ActionCancelQueryResult.CancelResult result = 1; + * @return The result. + */ + @java.lang.Override public org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult getResult() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult result = org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult.forNumber(result_); + return result == null ? 
org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (result_ != org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult.CANCEL_RESULT_UNSPECIFIED.getNumber()) { + output.writeEnum(1, result_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (result_ != org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult.CANCEL_RESULT_UNSPECIFIED.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, result_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult)) { + return super.equals(obj); + } + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult other = (org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult) obj; + + if (result_ != other.result_) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + result_; + hash = (29 * hash) + 
getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * The result of cancelling a query.
+     *
+     * The result should be wrapped in a google.protobuf.Any message.
+     *
+     * This command is deprecated since 13.0.0. Use the "CancelFlightInfo"
+     * action with DoAction instead.
+     * 
+ * + * Protobuf type {@code arrow.flight.protocol.sql.ActionCancelQueryResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:arrow.flight.protocol.sql.ActionCancelQueryResult) + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCancelQueryResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCancelQueryResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.class, org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.Builder.class); + } + + // Construct using org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + result_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.internal_static_arrow_flight_protocol_sql_ActionCancelQueryResult_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult getDefaultInstanceForType() { + return org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult build() { + 
org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult buildPartial() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult result = new org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.result_ = result_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult) { + return mergeFrom((org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult other) { + if (other == org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.getDefaultInstance()) return this; + if (other.result_ != 0) { + setResultValue(other.getResultValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + 
case 8: { + result_ = input.readEnum(); + bitField0_ |= 0x00000001; + break; + } // case 8 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int result_ = 0; + /** + * .arrow.flight.protocol.sql.ActionCancelQueryResult.CancelResult result = 1; + * @return The enum numeric value on the wire for result. + */ + @java.lang.Override public int getResultValue() { + return result_; + } + /** + * .arrow.flight.protocol.sql.ActionCancelQueryResult.CancelResult result = 1; + * @param value The enum numeric value on the wire for result to set. + * @return This builder for chaining. + */ + public Builder setResultValue(int value) { + result_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.sql.ActionCancelQueryResult.CancelResult result = 1; + * @return The result. + */ + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult getResult() { + org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult result = org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult.forNumber(result_); + return result == null ? org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult.UNRECOGNIZED : result; + } + /** + * .arrow.flight.protocol.sql.ActionCancelQueryResult.CancelResult result = 1; + * @param value The result to set. + * @return This builder for chaining. 
+ */ + public Builder setResult(org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult.CancelResult value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + result_ = value.getNumber(); + onChanged(); + return this; + } + /** + * .arrow.flight.protocol.sql.ActionCancelQueryResult.CancelResult result = 1; + * @return This builder for chaining. + */ + public Builder clearResult() { + bitField0_ = (bitField0_ & ~0x00000001); + result_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:arrow.flight.protocol.sql.ActionCancelQueryResult) + } + + // @@protoc_insertion_point(class_scope:arrow.flight.protocol.sql.ActionCancelQueryResult) + private static final org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult(); + } + + public static org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ActionCancelQueryResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw 
e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.sql.impl.FlightSql.ActionCancelQueryResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public static final int EXPERIMENTAL_FIELD_NUMBER = 1000; + /** + * extend .google.protobuf.MessageOptions { ... } + */ + public static final + com.google.protobuf.GeneratedMessage.GeneratedExtension< + com.google.protobuf.DescriptorProtos.MessageOptions, + java.lang.Boolean> experimental = com.google.protobuf.GeneratedMessage + .newFileScopedGeneratedExtension( + java.lang.Boolean.class, + null); + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandGetSqlInfo_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandGetSqlInfo_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandGetXdbcTypeInfo_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandGetXdbcTypeInfo_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandGetCatalogs_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internal_static_arrow_flight_protocol_sql_CommandGetCatalogs_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandGetDbSchemas_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandGetDbSchemas_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandGetTables_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandGetTables_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandGetTableTypes_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandGetTableTypes_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandGetPrimaryKeys_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandGetPrimaryKeys_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandGetExportedKeys_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandGetExportedKeys_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandGetImportedKeys_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandGetImportedKeys_fieldAccessorTable; + private static final 
com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandGetCrossReference_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandGetCrossReference_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_SubstraitPlan_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_SubstraitPlan_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedSubstraitPlanRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedSubstraitPlanRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementResult_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionClosePreparedStatementRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionClosePreparedStatementRequest_fieldAccessorTable; + private static final 
com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionBeginTransactionRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionBeginTransactionRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionBeginSavepointRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionBeginSavepointRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionBeginTransactionResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionBeginTransactionResult_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionBeginSavepointResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionBeginSavepointResult_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionEndTransactionRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionEndTransactionRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionEndSavepointRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionEndSavepointRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + 
internal_static_arrow_flight_protocol_sql_CommandStatementQuery_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandStatementQuery_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandStatementSubstraitPlan_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandStatementSubstraitPlan_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_TicketStatementQuery_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_TicketStatementQuery_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandPreparedStatementQuery_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandPreparedStatementQuery_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandStatementUpdate_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandStatementUpdate_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_CommandPreparedStatementUpdate_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_CommandPreparedStatementUpdate_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_DoPutUpdateResult_descriptor; 
+ private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_DoPutUpdateResult_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_DoPutPreparedStatementResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_DoPutPreparedStatementResult_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionCancelQueryRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionCancelQueryRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_arrow_flight_protocol_sql_ActionCancelQueryResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_arrow_flight_protocol_sql_ActionCancelQueryResult_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\017FlightSql.proto\022\031arrow.flight.protocol" + + ".sql\032 google/protobuf/descriptor.proto\"&" + + "\n\021CommandGetSqlInfo\022\014\n\004info\030\001 \003(\r:\003\300>\001\"C" + + "\n\026CommandGetXdbcTypeInfo\022\026\n\tdata_type\030\001 " + + "\001(\005H\000\210\001\001:\003\300>\001B\014\n\n_data_type\"\031\n\022CommandGe" + + "tCatalogs:\003\300>\001\"\200\001\n\023CommandGetDbSchemas\022\024" + + "\n\007catalog\030\001 \001(\tH\000\210\001\001\022%\n\030db_schema_filter" + + "_pattern\030\002 \001(\tH\001\210\001\001:\003\300>\001B\n\n\010_catalogB\033\n\031" + + 
"_db_schema_filter_pattern\"\360\001\n\020CommandGet" + + "Tables\022\024\n\007catalog\030\001 \001(\tH\000\210\001\001\022%\n\030db_schem" + + "a_filter_pattern\030\002 \001(\tH\001\210\001\001\022&\n\031table_nam" + + "e_filter_pattern\030\003 \001(\tH\002\210\001\001\022\023\n\013table_typ" + + "es\030\004 \003(\t\022\026\n\016include_schema\030\005 \001(\010:\003\300>\001B\n\n" + + "\010_catalogB\033\n\031_db_schema_filter_patternB\034" + + "\n\032_table_name_filter_pattern\"\033\n\024CommandG" + + "etTableTypes:\003\300>\001\"s\n\025CommandGetPrimaryKe" + + "ys\022\024\n\007catalog\030\001 \001(\tH\000\210\001\001\022\026\n\tdb_schema\030\002 " + + "\001(\tH\001\210\001\001\022\r\n\005table\030\003 \001(\t:\003\300>\001B\n\n\010_catalog" + + "B\014\n\n_db_schema\"t\n\026CommandGetExportedKeys" + + "\022\024\n\007catalog\030\001 \001(\tH\000\210\001\001\022\026\n\tdb_schema\030\002 \001(" + + "\tH\001\210\001\001\022\r\n\005table\030\003 \001(\t:\003\300>\001B\n\n\010_catalogB\014" + + "\n\n_db_schema\"t\n\026CommandGetImportedKeys\022\024" + + "\n\007catalog\030\001 \001(\tH\000\210\001\001\022\026\n\tdb_schema\030\002 \001(\tH" + + "\001\210\001\001\022\r\n\005table\030\003 \001(\t:\003\300>\001B\n\n\010_catalogB\014\n\n" + + "_db_schema\"\353\001\n\030CommandGetCrossReference\022" + + "\027\n\npk_catalog\030\001 \001(\tH\000\210\001\001\022\031\n\014pk_db_schema" + + "\030\002 \001(\tH\001\210\001\001\022\020\n\010pk_table\030\003 \001(\t\022\027\n\nfk_cata" + + "log\030\004 \001(\tH\002\210\001\001\022\031\n\014fk_db_schema\030\005 \001(\tH\003\210\001" + + "\001\022\020\n\010fk_table\030\006 \001(\t:\003\300>\001B\r\n\013_pk_catalogB" + + "\017\n\r_pk_db_schemaB\r\n\013_fk_catalogB\017\n\r_fk_d" + + "b_schema\"j\n$ActionCreatePreparedStatemen" + + "tRequest\022\r\n\005query\030\001 \001(\t\022\033\n\016transaction_i" + + "d\030\002 \001(\014H\000\210\001\001:\003\300>\001B\021\n\017_transaction_id\"3\n\r" + + "SubstraitPlan\022\014\n\004plan\030\001 
\001(\014\022\017\n\007version\030\002" + + " \001(\t:\003\300>\001\"\227\001\n(ActionCreatePreparedSubstr" + + "aitPlanRequest\0226\n\004plan\030\001 \001(\0132(.arrow.fli" + + "ght.protocol.sql.SubstraitPlan\022\033\n\016transa" + + "ction_id\030\002 \001(\014H\000\210\001\001:\003\300>\001B\021\n\017_transaction" + + "_id\"\177\n#ActionCreatePreparedStatementResu" + + "lt\022!\n\031prepared_statement_handle\030\001 \001(\014\022\026\n" + + "\016dataset_schema\030\002 \001(\014\022\030\n\020parameter_schem" + + "a\030\003 \001(\014:\003\300>\001\"M\n#ActionClosePreparedState" + + "mentRequest\022!\n\031prepared_statement_handle" + + "\030\001 \001(\014:\003\300>\001\"$\n\035ActionBeginTransactionReq" + + "uest:\003\300>\001\"H\n\033ActionBeginSavepointRequest" + + "\022\026\n\016transaction_id\030\001 \001(\014\022\014\n\004name\030\002 \001(\t:\003" + + "\300>\001\";\n\034ActionBeginTransactionResult\022\026\n\016t" + + "ransaction_id\030\001 \001(\014:\003\300>\001\"7\n\032ActionBeginS" + + "avepointResult\022\024\n\014savepoint_id\030\001 \001(\014:\003\300>" + + "\001\"\376\001\n\033ActionEndTransactionRequest\022\026\n\016tra" + + "nsaction_id\030\001 \001(\014\022U\n\006action\030\002 \001(\0162E.arro" + + "w.flight.protocol.sql.ActionEndTransacti" + + "onRequest.EndTransaction\"k\n\016EndTransacti" + + "on\022\037\n\033END_TRANSACTION_UNSPECIFIED\020\000\022\032\n\026E" + + "ND_TRANSACTION_COMMIT\020\001\022\034\n\030END_TRANSACTI" + + "ON_ROLLBACK\020\002:\003\300>\001\"\357\001\n\031ActionEndSavepoin" + + "tRequest\022\024\n\014savepoint_id\030\001 \001(\014\022Q\n\006action" + + "\030\002 \001(\0162A.arrow.flight.protocol.sql.Actio" + + "nEndSavepointRequest.EndSavepoint\"d\n\014End" + + "Savepoint\022\035\n\031END_SAVEPOINT_UNSPECIFIED\020\000" + + "\022\031\n\025END_SAVEPOINT_RELEASE\020\001\022\032\n\026END_SAVEP" + + "OINT_ROLLBACK\020\002:\003\300>\001\"[\n\025CommandStatement" + + "Query\022\r\n\005query\030\001 
\001(\t\022\033\n\016transaction_id\030\002" + + " \001(\014H\000\210\001\001:\003\300>\001B\021\n\017_transaction_id\"\214\001\n\035Co" + + "mmandStatementSubstraitPlan\0226\n\004plan\030\001 \001(" + + "\0132(.arrow.flight.protocol.sql.SubstraitP" + + "lan\022\033\n\016transaction_id\030\002 \001(\014H\000\210\001\001:\003\300>\001B\021\n" + + "\017_transaction_id\"5\n\024TicketStatementQuery" + + "\022\030\n\020statement_handle\030\001 \001(\014:\003\300>\001\"G\n\035Comma" + + "ndPreparedStatementQuery\022!\n\031prepared_sta" + + "tement_handle\030\001 \001(\014:\003\300>\001\"\\\n\026CommandState" + + "mentUpdate\022\r\n\005query\030\001 \001(\t\022\033\n\016transaction" + + "_id\030\002 \001(\014H\000\210\001\001:\003\300>\001B\021\n\017_transaction_id\"H" + + "\n\036CommandPreparedStatementUpdate\022!\n\031prep" + + "ared_statement_handle\030\001 \001(\014:\003\300>\001\".\n\021DoPu" + + "tUpdateResult\022\024\n\014record_count\030\001 \001(\003:\003\300>\001" + + "\"i\n\034DoPutPreparedStatementResult\022&\n\031prep" + + "ared_statement_handle\030\001 \001(\014H\000\210\001\001:\003\300>\001B\034\n" + + "\032_prepared_statement_handle\"/\n\030ActionCan" + + "celQueryRequest\022\014\n\004info\030\001 \001(\014:\005\030\001\300>\001\"\377\001\n" + + "\027ActionCancelQueryResult\022O\n\006result\030\001 \001(\016" + + "2?.arrow.flight.protocol.sql.ActionCance" + + "lQueryResult.CancelResult\"\213\001\n\014CancelResu" + + "lt\022\035\n\031CANCEL_RESULT_UNSPECIFIED\020\000\022\033\n\027CAN" + + "CEL_RESULT_CANCELLED\020\001\022\034\n\030CANCEL_RESULT_" + + "CANCELLING\020\002\022!\n\035CANCEL_RESULT_NOT_CANCEL" + + "LABLE\020\003:\005\030\001\300>\001*\267\030\n\007SqlInfo\022\032\n\026FLIGHT_SQL" + + "_SERVER_NAME\020\000\022\035\n\031FLIGHT_SQL_SERVER_VERS" + + "ION\020\001\022#\n\037FLIGHT_SQL_SERVER_ARROW_VERSION" + + "\020\002\022\037\n\033FLIGHT_SQL_SERVER_READ_ONLY\020\003\022\031\n\025F" + + "LIGHT_SQL_SERVER_SQL\020\004\022\037\n\033FLIGHT_SQL_SER" + + 
"VER_SUBSTRAIT\020\005\022+\n\'FLIGHT_SQL_SERVER_SUB" + + "STRAIT_MIN_VERSION\020\006\022+\n\'FLIGHT_SQL_SERVE" + + "R_SUBSTRAIT_MAX_VERSION\020\007\022!\n\035FLIGHT_SQL_" + + "SERVER_TRANSACTION\020\010\022\034\n\030FLIGHT_SQL_SERVE" + + "R_CANCEL\020\t\022\'\n#FLIGHT_SQL_SERVER_STATEMEN" + + "T_TIMEOUT\020d\022)\n%FLIGHT_SQL_SERVER_TRANSAC" + + "TION_TIMEOUT\020e\022\024\n\017SQL_DDL_CATALOG\020\364\003\022\023\n\016" + + "SQL_DDL_SCHEMA\020\365\003\022\022\n\rSQL_DDL_TABLE\020\366\003\022\030\n" + + "\023SQL_IDENTIFIER_CASE\020\367\003\022\036\n\031SQL_IDENTIFIE" + + "R_QUOTE_CHAR\020\370\003\022\037\n\032SQL_QUOTED_IDENTIFIER" + + "_CASE\020\371\003\022\"\n\035SQL_ALL_TABLES_ARE_SELECTABL" + + "E\020\372\003\022\026\n\021SQL_NULL_ORDERING\020\373\003\022\021\n\014SQL_KEYW" + + "ORDS\020\374\003\022\032\n\025SQL_NUMERIC_FUNCTIONS\020\375\003\022\031\n\024S" + + "QL_STRING_FUNCTIONS\020\376\003\022\031\n\024SQL_SYSTEM_FUN" + + "CTIONS\020\377\003\022\033\n\026SQL_DATETIME_FUNCTIONS\020\200\004\022\035" + + "\n\030SQL_SEARCH_STRING_ESCAPE\020\201\004\022\036\n\031SQL_EXT" + + "RA_NAME_CHARACTERS\020\202\004\022!\n\034SQL_SUPPORTS_CO" + + "LUMN_ALIASING\020\203\004\022\037\n\032SQL_NULL_PLUS_NULL_I" + + "S_NULL\020\204\004\022\031\n\024SQL_SUPPORTS_CONVERT\020\205\004\022)\n$" + + "SQL_SUPPORTS_TABLE_CORRELATION_NAMES\020\206\004\022" + + "3\n.SQL_SUPPORTS_DIFFERENT_TABLE_CORRELAT" + + "ION_NAMES\020\207\004\022)\n$SQL_SUPPORTS_EXPRESSIONS" + + "_IN_ORDER_BY\020\210\004\022$\n\037SQL_SUPPORTS_ORDER_BY" + + "_UNRELATED\020\211\004\022\033\n\026SQL_SUPPORTED_GROUP_BY\020" + + "\212\004\022$\n\037SQL_SUPPORTS_LIKE_ESCAPE_CLAUSE\020\213\004" + + "\022&\n!SQL_SUPPORTS_NON_NULLABLE_COLUMNS\020\214\004" + + "\022\032\n\025SQL_SUPPORTED_GRAMMAR\020\215\004\022\037\n\032SQL_ANSI" + + "92_SUPPORTED_LEVEL\020\216\004\0220\n+SQL_SUPPORTS_IN" + + "TEGRITY_ENHANCEMENT_FACILITY\020\217\004\022\"\n\035SQL_O" + + "UTER_JOINS_SUPPORT_LEVEL\020\220\004\022\024\n\017SQL_SCHEM" + + 
"A_TERM\020\221\004\022\027\n\022SQL_PROCEDURE_TERM\020\222\004\022\025\n\020SQ" + + "L_CATALOG_TERM\020\223\004\022\031\n\024SQL_CATALOG_AT_STAR" + + "T\020\224\004\022\"\n\035SQL_SCHEMAS_SUPPORTED_ACTIONS\020\225\004" + + "\022#\n\036SQL_CATALOGS_SUPPORTED_ACTIONS\020\226\004\022&\n" + + "!SQL_SUPPORTED_POSITIONED_COMMANDS\020\227\004\022$\n" + + "\037SQL_SELECT_FOR_UPDATE_SUPPORTED\020\230\004\022$\n\037S" + + "QL_STORED_PROCEDURES_SUPPORTED\020\231\004\022\035\n\030SQL" + + "_SUPPORTED_SUBQUERIES\020\232\004\022(\n#SQL_CORRELAT" + + "ED_SUBQUERIES_SUPPORTED\020\233\004\022\031\n\024SQL_SUPPOR" + + "TED_UNIONS\020\234\004\022\"\n\035SQL_MAX_BINARY_LITERAL_" + + "LENGTH\020\235\004\022 \n\033SQL_MAX_CHAR_LITERAL_LENGTH" + + "\020\236\004\022\037\n\032SQL_MAX_COLUMN_NAME_LENGTH\020\237\004\022 \n\033" + + "SQL_MAX_COLUMNS_IN_GROUP_BY\020\240\004\022\035\n\030SQL_MA" + + "X_COLUMNS_IN_INDEX\020\241\004\022 \n\033SQL_MAX_COLUMNS" + + "_IN_ORDER_BY\020\242\004\022\036\n\031SQL_MAX_COLUMNS_IN_SE" + + "LECT\020\243\004\022\035\n\030SQL_MAX_COLUMNS_IN_TABLE\020\244\004\022\030" + + "\n\023SQL_MAX_CONNECTIONS\020\245\004\022\037\n\032SQL_MAX_CURS" + + "OR_NAME_LENGTH\020\246\004\022\031\n\024SQL_MAX_INDEX_LENGT" + + "H\020\247\004\022\036\n\031SQL_DB_SCHEMA_NAME_LENGTH\020\250\004\022\"\n\035" + + "SQL_MAX_PROCEDURE_NAME_LENGTH\020\251\004\022 \n\033SQL_" + + "MAX_CATALOG_NAME_LENGTH\020\252\004\022\025\n\020SQL_MAX_RO" + + "W_SIZE\020\253\004\022$\n\037SQL_MAX_ROW_SIZE_INCLUDES_B" + + "LOBS\020\254\004\022\035\n\030SQL_MAX_STATEMENT_LENGTH\020\255\004\022\027" + + "\n\022SQL_MAX_STATEMENTS\020\256\004\022\036\n\031SQL_MAX_TABLE" + + "_NAME_LENGTH\020\257\004\022\035\n\030SQL_MAX_TABLES_IN_SEL" + + "ECT\020\260\004\022\034\n\027SQL_MAX_USERNAME_LENGTH\020\261\004\022&\n!" 
+ + "SQL_DEFAULT_TRANSACTION_ISOLATION\020\262\004\022\037\n\032" + + "SQL_TRANSACTIONS_SUPPORTED\020\263\004\0220\n+SQL_SUP" + + "PORTED_TRANSACTIONS_ISOLATION_LEVELS\020\264\004\022" + + "2\n-SQL_DATA_DEFINITION_CAUSES_TRANSACTIO" + + "N_COMMIT\020\265\004\0221\n,SQL_DATA_DEFINITIONS_IN_T" + + "RANSACTIONS_IGNORED\020\266\004\022#\n\036SQL_SUPPORTED_" + + "RESULT_SET_TYPES\020\267\004\022;\n6SQL_SUPPORTED_CON" + + "CURRENCIES_FOR_RESULT_SET_UNSPECIFIED\020\270\004" + + "\022<\n7SQL_SUPPORTED_CONCURRENCIES_FOR_RESU" + + "LT_SET_FORWARD_ONLY\020\271\004\022@\n;SQL_SUPPORTED_" + + "CONCURRENCIES_FOR_RESULT_SET_SCROLL_SENS" + + "ITIVE\020\272\004\022B\n=SQL_SUPPORTED_CONCURRENCIES_" + + "FOR_RESULT_SET_SCROLL_INSENSITIVE\020\273\004\022 \n\033" + + "SQL_BATCH_UPDATES_SUPPORTED\020\274\004\022\035\n\030SQL_SA" + + "VEPOINTS_SUPPORTED\020\275\004\022#\n\036SQL_NAMED_PARAM" + + "ETERS_SUPPORTED\020\276\004\022\035\n\030SQL_LOCATORS_UPDAT" + + "E_COPY\020\277\004\0225\n0SQL_STORED_FUNCTIONS_USING_" + + "CALL_SYNTAX_SUPPORTED\020\300\004*\221\001\n\027SqlSupporte" + + "dTransaction\022\"\n\036SQL_SUPPORTED_TRANSACTIO" + + "N_NONE\020\000\022)\n%SQL_SUPPORTED_TRANSACTION_TR" + + "ANSACTION\020\001\022\'\n#SQL_SUPPORTED_TRANSACTION" + + "_SAVEPOINT\020\002*\262\001\n\033SqlSupportedCaseSensiti" + + "vity\022 \n\034SQL_CASE_SENSITIVITY_UNKNOWN\020\000\022)" + + "\n%SQL_CASE_SENSITIVITY_CASE_INSENSITIVE\020" + + "\001\022\"\n\036SQL_CASE_SENSITIVITY_UPPERCASE\020\002\022\"\n" + + "\036SQL_CASE_SENSITIVITY_LOWERCASE\020\003*\202\001\n\017Sq" + + "lNullOrdering\022\031\n\025SQL_NULLS_SORTED_HIGH\020\000" + + "\022\030\n\024SQL_NULLS_SORTED_LOW\020\001\022\035\n\031SQL_NULLS_" + + "SORTED_AT_START\020\002\022\033\n\027SQL_NULLS_SORTED_AT" + + "_END\020\003*^\n\023SupportedSqlGrammar\022\027\n\023SQL_MIN" + + "IMUM_GRAMMAR\020\000\022\024\n\020SQL_CORE_GRAMMAR\020\001\022\030\n\024" + + "SQL_EXTENDED_GRAMMAR\020\002*h\n\036SupportedAnsi9" + + 
"2SqlGrammarLevel\022\024\n\020ANSI92_ENTRY_SQL\020\000\022\033" + + "\n\027ANSI92_INTERMEDIATE_SQL\020\001\022\023\n\017ANSI92_FU" + + "LL_SQL\020\002*m\n\031SqlOuterJoinsSupportLevel\022\031\n" + + "\025SQL_JOINS_UNSUPPORTED\020\000\022\033\n\027SQL_LIMITED_" + + "OUTER_JOINS\020\001\022\030\n\024SQL_FULL_OUTER_JOINS\020\002*" + + "Q\n\023SqlSupportedGroupBy\022\032\n\026SQL_GROUP_BY_U" + + "NRELATED\020\000\022\036\n\032SQL_GROUP_BY_BEYOND_SELECT" + + "\020\001*\220\001\n\032SqlSupportedElementActions\022\"\n\036SQL" + + "_ELEMENT_IN_PROCEDURE_CALLS\020\000\022$\n SQL_ELE" + + "MENT_IN_INDEX_DEFINITIONS\020\001\022(\n$SQL_ELEME" + + "NT_IN_PRIVILEGE_DEFINITIONS\020\002*V\n\036SqlSupp" + + "ortedPositionedCommands\022\031\n\025SQL_POSITIONE" + + "D_DELETE\020\000\022\031\n\025SQL_POSITIONED_UPDATE\020\001*\227\001" + + "\n\026SqlSupportedSubqueries\022!\n\035SQL_SUBQUERI" + + "ES_IN_COMPARISONS\020\000\022\034\n\030SQL_SUBQUERIES_IN" + + "_EXISTS\020\001\022\031\n\025SQL_SUBQUERIES_IN_INS\020\002\022!\n\035" + + "SQL_SUBQUERIES_IN_QUANTIFIEDS\020\003*6\n\022SqlSu" + + "pportedUnions\022\r\n\tSQL_UNION\020\000\022\021\n\rSQL_UNIO" + + "N_ALL\020\001*\311\001\n\034SqlTransactionIsolationLevel" + + "\022\030\n\024SQL_TRANSACTION_NONE\020\000\022$\n SQL_TRANSA" + + "CTION_READ_UNCOMMITTED\020\001\022\"\n\036SQL_TRANSACT" + + "ION_READ_COMMITTED\020\002\022#\n\037SQL_TRANSACTION_" + + "REPEATABLE_READ\020\003\022 \n\034SQL_TRANSACTION_SER" + + "IALIZABLE\020\004*\211\001\n\030SqlSupportedTransactions" + + "\022\037\n\033SQL_TRANSACTION_UNSPECIFIED\020\000\022$\n SQL" + + "_DATA_DEFINITION_TRANSACTIONS\020\001\022&\n\"SQL_D" + + "ATA_MANIPULATION_TRANSACTIONS\020\002*\274\001\n\031SqlS" + + "upportedResultSetType\022#\n\037SQL_RESULT_SET_" + + "TYPE_UNSPECIFIED\020\000\022$\n SQL_RESULT_SET_TYP" + + "E_FORWARD_ONLY\020\001\022*\n&SQL_RESULT_SET_TYPE_" + + "SCROLL_INSENSITIVE\020\002\022(\n$SQL_RESULT_SET_T" + + "YPE_SCROLL_SENSITIVE\020\003*\242\001\n SqlSupportedR" + 
+ "esultSetConcurrency\022*\n&SQL_RESULT_SET_CO" + + "NCURRENCY_UNSPECIFIED\020\000\022(\n$SQL_RESULT_SE" + + "T_CONCURRENCY_READ_ONLY\020\001\022(\n$SQL_RESULT_" + + "SET_CONCURRENCY_UPDATABLE\020\002*\231\004\n\022SqlSuppo" + + "rtsConvert\022\026\n\022SQL_CONVERT_BIGINT\020\000\022\026\n\022SQ" + + "L_CONVERT_BINARY\020\001\022\023\n\017SQL_CONVERT_BIT\020\002\022" + + "\024\n\020SQL_CONVERT_CHAR\020\003\022\024\n\020SQL_CONVERT_DAT" + + "E\020\004\022\027\n\023SQL_CONVERT_DECIMAL\020\005\022\025\n\021SQL_CONV" + + "ERT_FLOAT\020\006\022\027\n\023SQL_CONVERT_INTEGER\020\007\022!\n\035" + + "SQL_CONVERT_INTERVAL_DAY_TIME\020\010\022#\n\037SQL_C" + + "ONVERT_INTERVAL_YEAR_MONTH\020\t\022\035\n\031SQL_CONV" + + "ERT_LONGVARBINARY\020\n\022\033\n\027SQL_CONVERT_LONGV" + + "ARCHAR\020\013\022\027\n\023SQL_CONVERT_NUMERIC\020\014\022\024\n\020SQL" + + "_CONVERT_REAL\020\r\022\030\n\024SQL_CONVERT_SMALLINT\020" + + "\016\022\024\n\020SQL_CONVERT_TIME\020\017\022\031\n\025SQL_CONVERT_T" + + "IMESTAMP\020\020\022\027\n\023SQL_CONVERT_TINYINT\020\021\022\031\n\025S" + + "QL_CONVERT_VARBINARY\020\022\022\027\n\023SQL_CONVERT_VA" + + "RCHAR\020\023*\217\004\n\014XdbcDataType\022\025\n\021XDBC_UNKNOWN" + + "_TYPE\020\000\022\r\n\tXDBC_CHAR\020\001\022\020\n\014XDBC_NUMERIC\020\002" + + "\022\020\n\014XDBC_DECIMAL\020\003\022\020\n\014XDBC_INTEGER\020\004\022\021\n\r" + + "XDBC_SMALLINT\020\005\022\016\n\nXDBC_FLOAT\020\006\022\r\n\tXDBC_" + + "REAL\020\007\022\017\n\013XDBC_DOUBLE\020\010\022\021\n\rXDBC_DATETIME" + + "\020\t\022\021\n\rXDBC_INTERVAL\020\n\022\020\n\014XDBC_VARCHAR\020\014\022" + + "\r\n\tXDBC_DATE\020[\022\r\n\tXDBC_TIME\020\\\022\022\n\016XDBC_TI" + + "MESTAMP\020]\022\035\n\020XDBC_LONGVARCHAR\020\377\377\377\377\377\377\377\377\377\001" + + "\022\030\n\013XDBC_BINARY\020\376\377\377\377\377\377\377\377\377\001\022\033\n\016XDBC_VARBI" + + "NARY\020\375\377\377\377\377\377\377\377\377\001\022\037\n\022XDBC_LONGVARBINARY\020\374\377" + + 
"\377\377\377\377\377\377\377\001\022\030\n\013XDBC_BIGINT\020\373\377\377\377\377\377\377\377\377\001\022\031\n\014XD" + + "BC_TINYINT\020\372\377\377\377\377\377\377\377\377\001\022\025\n\010XDBC_BIT\020\371\377\377\377\377\377" + + "\377\377\377\001\022\027\n\nXDBC_WCHAR\020\370\377\377\377\377\377\377\377\377\001\022\032\n\rXDBC_WV" + + "ARCHAR\020\367\377\377\377\377\377\377\377\377\001*\243\010\n\023XdbcDatetimeSubcod" + + "e\022\030\n\024XDBC_SUBCODE_UNKNOWN\020\000\022\025\n\021XDBC_SUBC" + + "ODE_YEAR\020\001\022\025\n\021XDBC_SUBCODE_DATE\020\001\022\025\n\021XDB" + + "C_SUBCODE_TIME\020\002\022\026\n\022XDBC_SUBCODE_MONTH\020\002" + + "\022\032\n\026XDBC_SUBCODE_TIMESTAMP\020\003\022\024\n\020XDBC_SUB" + + "CODE_DAY\020\003\022#\n\037XDBC_SUBCODE_TIME_WITH_TIM" + + "EZONE\020\004\022\025\n\021XDBC_SUBCODE_HOUR\020\004\022(\n$XDBC_S" + + "UBCODE_TIMESTAMP_WITH_TIMEZONE\020\005\022\027\n\023XDBC" + + "_SUBCODE_MINUTE\020\005\022\027\n\023XDBC_SUBCODE_SECOND" + + "\020\006\022\036\n\032XDBC_SUBCODE_YEAR_TO_MONTH\020\007\022\034\n\030XD" + + "BC_SUBCODE_DAY_TO_HOUR\020\010\022\036\n\032XDBC_SUBCODE" + + "_DAY_TO_MINUTE\020\t\022\036\n\032XDBC_SUBCODE_DAY_TO_" + + "SECOND\020\n\022\037\n\033XDBC_SUBCODE_HOUR_TO_MINUTE\020" + + "\013\022\037\n\033XDBC_SUBCODE_HOUR_TO_SECOND\020\014\022!\n\035XD" + + "BC_SUBCODE_MINUTE_TO_SECOND\020\r\022\036\n\032XDBC_SU" + + "BCODE_INTERVAL_YEAR\020e\022\037\n\033XDBC_SUBCODE_IN" + + "TERVAL_MONTH\020f\022\035\n\031XDBC_SUBCODE_INTERVAL_" + + "DAY\020g\022\036\n\032XDBC_SUBCODE_INTERVAL_HOUR\020h\022 \n" + + "\034XDBC_SUBCODE_INTERVAL_MINUTE\020i\022 \n\034XDBC_" + + "SUBCODE_INTERVAL_SECOND\020j\022\'\n#XDBC_SUBCOD" + + "E_INTERVAL_YEAR_TO_MONTH\020k\022%\n!XDBC_SUBCO" + + "DE_INTERVAL_DAY_TO_HOUR\020l\022\'\n#XDBC_SUBCOD" + + "E_INTERVAL_DAY_TO_MINUTE\020m\022\'\n#XDBC_SUBCO" + + "DE_INTERVAL_DAY_TO_SECOND\020n\022(\n$XDBC_SUBC" + + "ODE_INTERVAL_HOUR_TO_MINUTE\020o\022(\n$XDBC_SU" + + 
"BCODE_INTERVAL_HOUR_TO_SECOND\020p\022*\n&XDBC_" + + "SUBCODE_INTERVAL_MINUTE_TO_SECOND\020q\032\002\020\001*" + + "W\n\010Nullable\022\030\n\024NULLABILITY_NO_NULLS\020\000\022\030\n" + + "\024NULLABILITY_NULLABLE\020\001\022\027\n\023NULLABILITY_U" + + "NKNOWN\020\002*a\n\nSearchable\022\023\n\017SEARCHABLE_NON" + + "E\020\000\022\023\n\017SEARCHABLE_CHAR\020\001\022\024\n\020SEARCHABLE_B" + + "ASIC\020\002\022\023\n\017SEARCHABLE_FULL\020\003*\\\n\021UpdateDel" + + "eteRules\022\013\n\007CASCADE\020\000\022\014\n\010RESTRICT\020\001\022\014\n\010S" + + "ET_NULL\020\002\022\r\n\tNO_ACTION\020\003\022\017\n\013SET_DEFAULT\020" + + "\004:6\n\014experimental\022\037.google.protobuf.Mess" + + "ageOptions\030\350\007 \001(\010BV\n org.apache.arrow.fl" + + "ight.sql.implZ2github.com/apache/arrow/g" + + "o/arrow/flight/gen/flightb\006proto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + com.google.protobuf.DescriptorProtos.getDescriptor(), + }); + internal_static_arrow_flight_protocol_sql_CommandGetSqlInfo_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_arrow_flight_protocol_sql_CommandGetSqlInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandGetSqlInfo_descriptor, + new java.lang.String[] { "Info", }); + internal_static_arrow_flight_protocol_sql_CommandGetXdbcTypeInfo_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_arrow_flight_protocol_sql_CommandGetXdbcTypeInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandGetXdbcTypeInfo_descriptor, + new java.lang.String[] { "DataType", "DataType", }); + internal_static_arrow_flight_protocol_sql_CommandGetCatalogs_descriptor = + getDescriptor().getMessageTypes().get(2); + 
internal_static_arrow_flight_protocol_sql_CommandGetCatalogs_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandGetCatalogs_descriptor, + new java.lang.String[] { }); + internal_static_arrow_flight_protocol_sql_CommandGetDbSchemas_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_arrow_flight_protocol_sql_CommandGetDbSchemas_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandGetDbSchemas_descriptor, + new java.lang.String[] { "Catalog", "DbSchemaFilterPattern", "Catalog", "DbSchemaFilterPattern", }); + internal_static_arrow_flight_protocol_sql_CommandGetTables_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_arrow_flight_protocol_sql_CommandGetTables_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandGetTables_descriptor, + new java.lang.String[] { "Catalog", "DbSchemaFilterPattern", "TableNameFilterPattern", "TableTypes", "IncludeSchema", "Catalog", "DbSchemaFilterPattern", "TableNameFilterPattern", }); + internal_static_arrow_flight_protocol_sql_CommandGetTableTypes_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_arrow_flight_protocol_sql_CommandGetTableTypes_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandGetTableTypes_descriptor, + new java.lang.String[] { }); + internal_static_arrow_flight_protocol_sql_CommandGetPrimaryKeys_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_arrow_flight_protocol_sql_CommandGetPrimaryKeys_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandGetPrimaryKeys_descriptor, + new java.lang.String[] { "Catalog", 
"DbSchema", "Table", "Catalog", "DbSchema", }); + internal_static_arrow_flight_protocol_sql_CommandGetExportedKeys_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_arrow_flight_protocol_sql_CommandGetExportedKeys_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandGetExportedKeys_descriptor, + new java.lang.String[] { "Catalog", "DbSchema", "Table", "Catalog", "DbSchema", }); + internal_static_arrow_flight_protocol_sql_CommandGetImportedKeys_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_arrow_flight_protocol_sql_CommandGetImportedKeys_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandGetImportedKeys_descriptor, + new java.lang.String[] { "Catalog", "DbSchema", "Table", "Catalog", "DbSchema", }); + internal_static_arrow_flight_protocol_sql_CommandGetCrossReference_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_arrow_flight_protocol_sql_CommandGetCrossReference_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandGetCrossReference_descriptor, + new java.lang.String[] { "PkCatalog", "PkDbSchema", "PkTable", "FkCatalog", "FkDbSchema", "FkTable", "PkCatalog", "PkDbSchema", "FkCatalog", "FkDbSchema", }); + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementRequest_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementRequest_descriptor, + new java.lang.String[] { "Query", "TransactionId", "TransactionId", }); + internal_static_arrow_flight_protocol_sql_SubstraitPlan_descriptor = 
+ getDescriptor().getMessageTypes().get(11); + internal_static_arrow_flight_protocol_sql_SubstraitPlan_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_SubstraitPlan_descriptor, + new java.lang.String[] { "Plan", "Version", }); + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedSubstraitPlanRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedSubstraitPlanRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedSubstraitPlanRequest_descriptor, + new java.lang.String[] { "Plan", "TransactionId", "TransactionId", }); + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementResult_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionCreatePreparedStatementResult_descriptor, + new java.lang.String[] { "PreparedStatementHandle", "DatasetSchema", "ParameterSchema", }); + internal_static_arrow_flight_protocol_sql_ActionClosePreparedStatementRequest_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_arrow_flight_protocol_sql_ActionClosePreparedStatementRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionClosePreparedStatementRequest_descriptor, + new java.lang.String[] { "PreparedStatementHandle", }); + internal_static_arrow_flight_protocol_sql_ActionBeginTransactionRequest_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_arrow_flight_protocol_sql_ActionBeginTransactionRequest_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionBeginTransactionRequest_descriptor, + new java.lang.String[] { }); + internal_static_arrow_flight_protocol_sql_ActionBeginSavepointRequest_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_arrow_flight_protocol_sql_ActionBeginSavepointRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionBeginSavepointRequest_descriptor, + new java.lang.String[] { "TransactionId", "Name", }); + internal_static_arrow_flight_protocol_sql_ActionBeginTransactionResult_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_arrow_flight_protocol_sql_ActionBeginTransactionResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionBeginTransactionResult_descriptor, + new java.lang.String[] { "TransactionId", }); + internal_static_arrow_flight_protocol_sql_ActionBeginSavepointResult_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_arrow_flight_protocol_sql_ActionBeginSavepointResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionBeginSavepointResult_descriptor, + new java.lang.String[] { "SavepointId", }); + internal_static_arrow_flight_protocol_sql_ActionEndTransactionRequest_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_arrow_flight_protocol_sql_ActionEndTransactionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionEndTransactionRequest_descriptor, + new java.lang.String[] { "TransactionId", "Action", }); + internal_static_arrow_flight_protocol_sql_ActionEndSavepointRequest_descriptor = + getDescriptor().getMessageTypes().get(20); 
+ internal_static_arrow_flight_protocol_sql_ActionEndSavepointRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionEndSavepointRequest_descriptor, + new java.lang.String[] { "SavepointId", "Action", }); + internal_static_arrow_flight_protocol_sql_CommandStatementQuery_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_arrow_flight_protocol_sql_CommandStatementQuery_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandStatementQuery_descriptor, + new java.lang.String[] { "Query", "TransactionId", "TransactionId", }); + internal_static_arrow_flight_protocol_sql_CommandStatementSubstraitPlan_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_arrow_flight_protocol_sql_CommandStatementSubstraitPlan_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandStatementSubstraitPlan_descriptor, + new java.lang.String[] { "Plan", "TransactionId", "TransactionId", }); + internal_static_arrow_flight_protocol_sql_TicketStatementQuery_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_arrow_flight_protocol_sql_TicketStatementQuery_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_TicketStatementQuery_descriptor, + new java.lang.String[] { "StatementHandle", }); + internal_static_arrow_flight_protocol_sql_CommandPreparedStatementQuery_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_arrow_flight_protocol_sql_CommandPreparedStatementQuery_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandPreparedStatementQuery_descriptor, + new java.lang.String[] { 
"PreparedStatementHandle", }); + internal_static_arrow_flight_protocol_sql_CommandStatementUpdate_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_arrow_flight_protocol_sql_CommandStatementUpdate_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandStatementUpdate_descriptor, + new java.lang.String[] { "Query", "TransactionId", "TransactionId", }); + internal_static_arrow_flight_protocol_sql_CommandPreparedStatementUpdate_descriptor = + getDescriptor().getMessageTypes().get(26); + internal_static_arrow_flight_protocol_sql_CommandPreparedStatementUpdate_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_CommandPreparedStatementUpdate_descriptor, + new java.lang.String[] { "PreparedStatementHandle", }); + internal_static_arrow_flight_protocol_sql_DoPutUpdateResult_descriptor = + getDescriptor().getMessageTypes().get(27); + internal_static_arrow_flight_protocol_sql_DoPutUpdateResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_DoPutUpdateResult_descriptor, + new java.lang.String[] { "RecordCount", }); + internal_static_arrow_flight_protocol_sql_DoPutPreparedStatementResult_descriptor = + getDescriptor().getMessageTypes().get(28); + internal_static_arrow_flight_protocol_sql_DoPutPreparedStatementResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_DoPutPreparedStatementResult_descriptor, + new java.lang.String[] { "PreparedStatementHandle", "PreparedStatementHandle", }); + internal_static_arrow_flight_protocol_sql_ActionCancelQueryRequest_descriptor = + getDescriptor().getMessageTypes().get(29); + internal_static_arrow_flight_protocol_sql_ActionCancelQueryRequest_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionCancelQueryRequest_descriptor, + new java.lang.String[] { "Info", }); + internal_static_arrow_flight_protocol_sql_ActionCancelQueryResult_descriptor = + getDescriptor().getMessageTypes().get(30); + internal_static_arrow_flight_protocol_sql_ActionCancelQueryResult_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_arrow_flight_protocol_sql_ActionCancelQueryResult_descriptor, + new java.lang.String[] { "Result", }); + experimental.internalInit(descriptor.getExtensions().get(0)); + com.google.protobuf.ExtensionRegistry registry = + com.google.protobuf.ExtensionRegistry.newInstance(); + registry.add(org.apache.arrow.flight.sql.impl.FlightSql.experimental); + com.google.protobuf.Descriptors.FileDescriptor + .internalUpdateFileDescriptor(descriptor, registry); + com.google.protobuf.DescriptorProtos.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/java/flight/flight-core/target/generated-test-sources/protobuf/grpc-java/org/apache/arrow/flight/TestServiceGrpc.java b/java/flight/flight-core/target/generated-test-sources/protobuf/grpc-java/org/apache/arrow/flight/TestServiceGrpc.java new file mode 100644 index 000000000000..138bd64a5c4d --- /dev/null +++ b/java/flight/flight-core/target/generated-test-sources/protobuf/grpc-java/org/apache/arrow/flight/TestServiceGrpc.java @@ -0,0 +1,293 @@ +package org.apache.arrow.flight; + +import static io.grpc.MethodDescriptor.generateFullMethodName; + +/** + */ +@javax.annotation.Generated( + value = "by gRPC proto compiler (version 1.63.0)", + comments = "Source: test.proto") +@io.grpc.stub.annotations.GrpcGenerated +public final class TestServiceGrpc { + + private TestServiceGrpc() {} + + public static final java.lang.String SERVICE_NAME = "TestService"; + + // Static method descriptors that strictly reflect the proto. 
+ private static volatile io.grpc.MethodDescriptor getTestMethod; + + @io.grpc.stub.annotations.RpcMethod( + fullMethodName = SERVICE_NAME + '/' + "Test", + requestType = com.google.protobuf.Empty.class, + responseType = com.google.protobuf.Empty.class, + methodType = io.grpc.MethodDescriptor.MethodType.UNARY) + public static io.grpc.MethodDescriptor getTestMethod() { + io.grpc.MethodDescriptor getTestMethod; + if ((getTestMethod = TestServiceGrpc.getTestMethod) == null) { + synchronized (TestServiceGrpc.class) { + if ((getTestMethod = TestServiceGrpc.getTestMethod) == null) { + TestServiceGrpc.getTestMethod = getTestMethod = + io.grpc.MethodDescriptor.newBuilder() + .setType(io.grpc.MethodDescriptor.MethodType.UNARY) + .setFullMethodName(generateFullMethodName(SERVICE_NAME, "Test")) + .setSampledToLocalTracing(true) + .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.protobuf.Empty.getDefaultInstance())) + .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( + com.google.protobuf.Empty.getDefaultInstance())) + .setSchemaDescriptor(new TestServiceMethodDescriptorSupplier("Test")) + .build(); + } + } + } + return getTestMethod; + } + + /** + * Creates a new async stub that supports all call types for the service + */ + public static TestServiceStub newStub(io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public TestServiceStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new TestServiceStub(channel, callOptions); + } + }; + return TestServiceStub.newStub(factory, channel); + } + + /** + * Creates a new blocking-style stub that supports unary and streaming output calls on the service + */ + public static TestServiceBlockingStub newBlockingStub( + io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public 
TestServiceBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new TestServiceBlockingStub(channel, callOptions); + } + }; + return TestServiceBlockingStub.newStub(factory, channel); + } + + /** + * Creates a new ListenableFuture-style stub that supports unary calls on the service + */ + public static TestServiceFutureStub newFutureStub( + io.grpc.Channel channel) { + io.grpc.stub.AbstractStub.StubFactory factory = + new io.grpc.stub.AbstractStub.StubFactory() { + @java.lang.Override + public TestServiceFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new TestServiceFutureStub(channel, callOptions); + } + }; + return TestServiceFutureStub.newStub(factory, channel); + } + + /** + */ + public interface AsyncService { + + /** + */ + default void test(com.google.protobuf.Empty request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getTestMethod(), responseObserver); + } + } + + /** + * Base class for the server implementation of the service TestService. + */ + public static abstract class TestServiceImplBase + implements io.grpc.BindableService, AsyncService { + + @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { + return TestServiceGrpc.bindService(this); + } + } + + /** + * A stub to allow clients to do asynchronous rpc calls to service TestService. 
+ */ + public static final class TestServiceStub + extends io.grpc.stub.AbstractAsyncStub { + private TestServiceStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected TestServiceStub build( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new TestServiceStub(channel, callOptions); + } + + /** + */ + public void test(com.google.protobuf.Empty request, + io.grpc.stub.StreamObserver responseObserver) { + io.grpc.stub.ClientCalls.asyncUnaryCall( + getChannel().newCall(getTestMethod(), getCallOptions()), request, responseObserver); + } + } + + /** + * A stub to allow clients to do synchronous rpc calls to service TestService. + */ + public static final class TestServiceBlockingStub + extends io.grpc.stub.AbstractBlockingStub { + private TestServiceBlockingStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected TestServiceBlockingStub build( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new TestServiceBlockingStub(channel, callOptions); + } + + /** + */ + public com.google.protobuf.Empty test(com.google.protobuf.Empty request) { + return io.grpc.stub.ClientCalls.blockingUnaryCall( + getChannel(), getTestMethod(), getCallOptions(), request); + } + } + + /** + * A stub to allow clients to do ListenableFuture-style rpc calls to service TestService. 
+ */ + public static final class TestServiceFutureStub + extends io.grpc.stub.AbstractFutureStub { + private TestServiceFutureStub( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + super(channel, callOptions); + } + + @java.lang.Override + protected TestServiceFutureStub build( + io.grpc.Channel channel, io.grpc.CallOptions callOptions) { + return new TestServiceFutureStub(channel, callOptions); + } + + /** + */ + public com.google.common.util.concurrent.ListenableFuture test( + com.google.protobuf.Empty request) { + return io.grpc.stub.ClientCalls.futureUnaryCall( + getChannel().newCall(getTestMethod(), getCallOptions()), request); + } + } + + private static final int METHODID_TEST = 0; + + private static final class MethodHandlers implements + io.grpc.stub.ServerCalls.UnaryMethod, + io.grpc.stub.ServerCalls.ServerStreamingMethod, + io.grpc.stub.ServerCalls.ClientStreamingMethod, + io.grpc.stub.ServerCalls.BidiStreamingMethod { + private final AsyncService serviceImpl; + private final int methodId; + + MethodHandlers(AsyncService serviceImpl, int methodId) { + this.serviceImpl = serviceImpl; + this.methodId = methodId; + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public void invoke(Req request, io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + case METHODID_TEST: + serviceImpl.test((com.google.protobuf.Empty) request, + (io.grpc.stub.StreamObserver) responseObserver); + break; + default: + throw new AssertionError(); + } + } + + @java.lang.Override + @java.lang.SuppressWarnings("unchecked") + public io.grpc.stub.StreamObserver invoke( + io.grpc.stub.StreamObserver responseObserver) { + switch (methodId) { + default: + throw new AssertionError(); + } + } + } + + public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { + return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) + .addMethod( + getTestMethod(), + io.grpc.stub.ServerCalls.asyncUnaryCall( + new 
MethodHandlers< + com.google.protobuf.Empty, + com.google.protobuf.Empty>( + service, METHODID_TEST))) + .build(); + } + + private static abstract class TestServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { + TestServiceBaseDescriptorSupplier() {} + + @java.lang.Override + public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { + return org.apache.arrow.flight.Test.getDescriptor(); + } + + @java.lang.Override + public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { + return getFileDescriptor().findServiceByName("TestService"); + } + } + + private static final class TestServiceFileDescriptorSupplier + extends TestServiceBaseDescriptorSupplier { + TestServiceFileDescriptorSupplier() {} + } + + private static final class TestServiceMethodDescriptorSupplier + extends TestServiceBaseDescriptorSupplier + implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { + private final java.lang.String methodName; + + TestServiceMethodDescriptorSupplier(java.lang.String methodName) { + this.methodName = methodName; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { + return getServiceDescriptor().findMethodByName(methodName); + } + } + + private static volatile io.grpc.ServiceDescriptor serviceDescriptor; + + public static io.grpc.ServiceDescriptor getServiceDescriptor() { + io.grpc.ServiceDescriptor result = serviceDescriptor; + if (result == null) { + synchronized (TestServiceGrpc.class) { + result = serviceDescriptor; + if (result == null) { + serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) + .setSchemaDescriptor(new TestServiceFileDescriptorSupplier()) + .addMethod(getTestMethod()) + .build(); + } + } + } + return result; + } +} diff --git a/java/flight/flight-core/target/generated-test-sources/protobuf/java/org/apache/arrow/flight/Test.java 
b/java/flight/flight-core/target/generated-test-sources/protobuf/java/org/apache/arrow/flight/Test.java new file mode 100644 index 000000000000..638501be7469 --- /dev/null +++ b/java/flight/flight-core/target/generated-test-sources/protobuf/java/org/apache/arrow/flight/Test.java @@ -0,0 +1,40 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: test.proto + +package org.apache.arrow.flight; + +public final class Test { + private Test() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\ntest.proto\032\033google/protobuf/empty.prot" + + "o2G\n\013TestService\0228\n\004Test\022\026.google.protob" + + "uf.Empty\032\026.google.protobuf.Empty\"\000B\031\n\027or" + + "g.apache.arrow.flightb\006proto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + com.google.protobuf.EmptyProto.getDescriptor(), + }); + com.google.protobuf.EmptyProto.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/java/flight/flight-core/target/generated-test-sources/protobuf/java/org/apache/arrow/flight/perf/impl/PerfOuterClass.java b/java/flight/flight-core/target/generated-test-sources/protobuf/java/org/apache/arrow/flight/perf/impl/PerfOuterClass.java new file mode 100644 index 000000000000..7a95db062676 --- /dev/null +++ 
b/java/flight/flight-core/target/generated-test-sources/protobuf/java/org/apache/arrow/flight/perf/impl/PerfOuterClass.java @@ -0,0 +1,1548 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: perf.proto + +package org.apache.arrow.flight.perf.impl; + +public final class PerfOuterClass { + private PerfOuterClass() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface PerfOrBuilder extends + // @@protoc_insertion_point(interface_extends:Perf) + com.google.protobuf.MessageOrBuilder { + + /** + * bytes schema = 1; + * @return The schema. + */ + com.google.protobuf.ByteString getSchema(); + + /** + * int32 stream_count = 2; + * @return The streamCount. + */ + int getStreamCount(); + + /** + * int64 records_per_stream = 3; + * @return The recordsPerStream. + */ + long getRecordsPerStream(); + + /** + * int32 records_per_batch = 4; + * @return The recordsPerBatch. + */ + int getRecordsPerBatch(); + } + /** + * Protobuf type {@code Perf} + */ + public static final class Perf extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:Perf) + PerfOrBuilder { + private static final long serialVersionUID = 0L; + // Use Perf.newBuilder() to construct. 
+ private Perf(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Perf() { + schema_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Perf(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.internal_static_Perf_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.internal_static_Perf_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.class, org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.Builder.class); + } + + public static final int SCHEMA_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString schema_ = com.google.protobuf.ByteString.EMPTY; + /** + * bytes schema = 1; + * @return The schema. + */ + @java.lang.Override + public com.google.protobuf.ByteString getSchema() { + return schema_; + } + + public static final int STREAM_COUNT_FIELD_NUMBER = 2; + private int streamCount_ = 0; + /** + * int32 stream_count = 2; + * @return The streamCount. + */ + @java.lang.Override + public int getStreamCount() { + return streamCount_; + } + + public static final int RECORDS_PER_STREAM_FIELD_NUMBER = 3; + private long recordsPerStream_ = 0L; + /** + * int64 records_per_stream = 3; + * @return The recordsPerStream. + */ + @java.lang.Override + public long getRecordsPerStream() { + return recordsPerStream_; + } + + public static final int RECORDS_PER_BATCH_FIELD_NUMBER = 4; + private int recordsPerBatch_ = 0; + /** + * int32 records_per_batch = 4; + * @return The recordsPerBatch. 
+ */ + @java.lang.Override + public int getRecordsPerBatch() { + return recordsPerBatch_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!schema_.isEmpty()) { + output.writeBytes(1, schema_); + } + if (streamCount_ != 0) { + output.writeInt32(2, streamCount_); + } + if (recordsPerStream_ != 0L) { + output.writeInt64(3, recordsPerStream_); + } + if (recordsPerBatch_ != 0) { + output.writeInt32(4, recordsPerBatch_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!schema_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, schema_); + } + if (streamCount_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, streamCount_); + } + if (recordsPerStream_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(3, recordsPerStream_); + } + if (recordsPerBatch_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(4, recordsPerBatch_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf)) { + return super.equals(obj); + } + org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf other = (org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf) obj; + + if (!getSchema() + .equals(other.getSchema())) return false; + if (getStreamCount() + != 
other.getStreamCount()) return false; + if (getRecordsPerStream() + != other.getRecordsPerStream()) return false; + if (getRecordsPerBatch() + != other.getRecordsPerBatch()) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + SCHEMA_FIELD_NUMBER; + hash = (53 * hash) + getSchema().hashCode(); + hash = (37 * hash) + STREAM_COUNT_FIELD_NUMBER; + hash = (53 * hash) + getStreamCount(); + hash = (37 * hash) + RECORDS_PER_STREAM_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getRecordsPerStream()); + hash = (37 * hash) + RECORDS_PER_BATCH_FIELD_NUMBER; + hash = (53 * hash) + getRecordsPerBatch(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, 
extensionRegistry); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf parseFrom( + com.google.protobuf.CodedInputStream input, 
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code Perf} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:Perf) + org.apache.arrow.flight.perf.impl.PerfOuterClass.PerfOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.internal_static_Perf_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.internal_static_Perf_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.class, org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.Builder.class); + } + + // Construct using org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + 
super.clear(); + bitField0_ = 0; + schema_ = com.google.protobuf.ByteString.EMPTY; + streamCount_ = 0; + recordsPerStream_ = 0L; + recordsPerBatch_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.internal_static_Perf_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf getDefaultInstanceForType() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf build() { + org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf buildPartial() { + org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf result = new org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.schema_ = schema_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.streamCount_ = streamCount_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.recordsPerStream_ = recordsPerStream_; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.recordsPerBatch_ = recordsPerBatch_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf) { + return mergeFrom((org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public 
Builder mergeFrom(org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf other) { + if (other == org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.getDefaultInstance()) return this; + if (other.getSchema() != com.google.protobuf.ByteString.EMPTY) { + setSchema(other.getSchema()); + } + if (other.getStreamCount() != 0) { + setStreamCount(other.getStreamCount()); + } + if (other.getRecordsPerStream() != 0L) { + setRecordsPerStream(other.getRecordsPerStream()); + } + if (other.getRecordsPerBatch() != 0) { + setRecordsPerBatch(other.getRecordsPerBatch()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + schema_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 16: { + streamCount_ = input.readInt32(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 24: { + recordsPerStream_ = input.readInt64(); + bitField0_ |= 0x00000004; + break; + } // case 24 + case 32: { + recordsPerBatch_ = input.readInt32(); + bitField0_ |= 0x00000008; + break; + } // case 32 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString schema_ = 
com.google.protobuf.ByteString.EMPTY; + /** + * bytes schema = 1; + * @return The schema. + */ + @java.lang.Override + public com.google.protobuf.ByteString getSchema() { + return schema_; + } + /** + * bytes schema = 1; + * @param value The schema to set. + * @return This builder for chaining. + */ + public Builder setSchema(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + schema_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * bytes schema = 1; + * @return This builder for chaining. + */ + public Builder clearSchema() { + bitField0_ = (bitField0_ & ~0x00000001); + schema_ = getDefaultInstance().getSchema(); + onChanged(); + return this; + } + + private int streamCount_ ; + /** + * int32 stream_count = 2; + * @return The streamCount. + */ + @java.lang.Override + public int getStreamCount() { + return streamCount_; + } + /** + * int32 stream_count = 2; + * @param value The streamCount to set. + * @return This builder for chaining. + */ + public Builder setStreamCount(int value) { + + streamCount_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * int32 stream_count = 2; + * @return This builder for chaining. + */ + public Builder clearStreamCount() { + bitField0_ = (bitField0_ & ~0x00000002); + streamCount_ = 0; + onChanged(); + return this; + } + + private long recordsPerStream_ ; + /** + * int64 records_per_stream = 3; + * @return The recordsPerStream. + */ + @java.lang.Override + public long getRecordsPerStream() { + return recordsPerStream_; + } + /** + * int64 records_per_stream = 3; + * @param value The recordsPerStream to set. + * @return This builder for chaining. + */ + public Builder setRecordsPerStream(long value) { + + recordsPerStream_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * int64 records_per_stream = 3; + * @return This builder for chaining. 
+ */ + public Builder clearRecordsPerStream() { + bitField0_ = (bitField0_ & ~0x00000004); + recordsPerStream_ = 0L; + onChanged(); + return this; + } + + private int recordsPerBatch_ ; + /** + * int32 records_per_batch = 4; + * @return The recordsPerBatch. + */ + @java.lang.Override + public int getRecordsPerBatch() { + return recordsPerBatch_; + } + /** + * int32 records_per_batch = 4; + * @param value The recordsPerBatch to set. + * @return This builder for chaining. + */ + public Builder setRecordsPerBatch(int value) { + + recordsPerBatch_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * int32 records_per_batch = 4; + * @return This builder for chaining. + */ + public Builder clearRecordsPerBatch() { + bitField0_ = (bitField0_ & ~0x00000008); + recordsPerBatch_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:Perf) + } + + // @@protoc_insertion_point(class_scope:Perf) + private static final org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf(); + } + + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Perf parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + 
try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface TokenOrBuilder extends + // @@protoc_insertion_point(interface_extends:Token) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * definition of entire flight.
+     * 
+ * + * .Perf definition = 1; + * @return Whether the definition field is set. + */ + boolean hasDefinition(); + /** + *
+     * definition of entire flight.
+     * 
+ * + * .Perf definition = 1; + * @return The definition. + */ + org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf getDefinition(); + /** + *
+     * definition of entire flight.
+     * 
+ * + * .Perf definition = 1; + */ + org.apache.arrow.flight.perf.impl.PerfOuterClass.PerfOrBuilder getDefinitionOrBuilder(); + + /** + *
+     * inclusive start
+     * 
+ * + * int64 start = 2; + * @return The start. + */ + long getStart(); + + /** + *
+     * exclusive end
+     * 
+ * + * int64 end = 3; + * @return The end. + */ + long getEnd(); + } + /** + *
+   *
+   * Payload of ticket
+   * 
+ * + * Protobuf type {@code Token} + */ + public static final class Token extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:Token) + TokenOrBuilder { + private static final long serialVersionUID = 0L; + // Use Token.newBuilder() to construct. + private Token(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Token() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Token(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.internal_static_Token_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.internal_static_Token_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.perf.impl.PerfOuterClass.Token.class, org.apache.arrow.flight.perf.impl.PerfOuterClass.Token.Builder.class); + } + + public static final int DEFINITION_FIELD_NUMBER = 1; + private org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf definition_; + /** + *
+     * definition of entire flight.
+     * 
+ * + * .Perf definition = 1; + * @return Whether the definition field is set. + */ + @java.lang.Override + public boolean hasDefinition() { + return definition_ != null; + } + /** + *
+     * definition of entire flight.
+     * 
+ * + * .Perf definition = 1; + * @return The definition. + */ + @java.lang.Override + public org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf getDefinition() { + return definition_ == null ? org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.getDefaultInstance() : definition_; + } + /** + *
+     * definition of entire flight.
+     * 
+ * + * .Perf definition = 1; + */ + @java.lang.Override + public org.apache.arrow.flight.perf.impl.PerfOuterClass.PerfOrBuilder getDefinitionOrBuilder() { + return definition_ == null ? org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.getDefaultInstance() : definition_; + } + + public static final int START_FIELD_NUMBER = 2; + private long start_ = 0L; + /** + *
+     * inclusive start
+     * 
+ * + * int64 start = 2; + * @return The start. + */ + @java.lang.Override + public long getStart() { + return start_; + } + + public static final int END_FIELD_NUMBER = 3; + private long end_ = 0L; + /** + *
+     * exclusive end
+     * 
+ * + * int64 end = 3; + * @return The end. + */ + @java.lang.Override + public long getEnd() { + return end_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (definition_ != null) { + output.writeMessage(1, getDefinition()); + } + if (start_ != 0L) { + output.writeInt64(2, start_); + } + if (end_ != 0L) { + output.writeInt64(3, end_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (definition_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getDefinition()); + } + if (start_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(2, start_); + } + if (end_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(3, end_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.flight.perf.impl.PerfOuterClass.Token)) { + return super.equals(obj); + } + org.apache.arrow.flight.perf.impl.PerfOuterClass.Token other = (org.apache.arrow.flight.perf.impl.PerfOuterClass.Token) obj; + + if (hasDefinition() != other.hasDefinition()) return false; + if (hasDefinition()) { + if (!getDefinition() + .equals(other.getDefinition())) return false; + } + if (getStart() + != other.getStart()) return false; + if (getEnd() + != other.getEnd()) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return 
false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasDefinition()) { + hash = (37 * hash) + DEFINITION_FIELD_NUMBER; + hash = (53 * hash) + getDefinition().hashCode(); + } + hash = (37 * hash) + START_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStart()); + hash = (37 * hash) + END_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getEnd()); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static 
Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.flight.perf.impl.PerfOuterClass.Token prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     *
+     * Payload of ticket
+     * 
+ * + * Protobuf type {@code Token} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:Token) + org.apache.arrow.flight.perf.impl.PerfOuterClass.TokenOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.internal_static_Token_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.internal_static_Token_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.flight.perf.impl.PerfOuterClass.Token.class, org.apache.arrow.flight.perf.impl.PerfOuterClass.Token.Builder.class); + } + + // Construct using org.apache.arrow.flight.perf.impl.PerfOuterClass.Token.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + definition_ = null; + if (definitionBuilder_ != null) { + definitionBuilder_.dispose(); + definitionBuilder_ = null; + } + start_ = 0L; + end_ = 0L; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.internal_static_Token_descriptor; + } + + @java.lang.Override + public org.apache.arrow.flight.perf.impl.PerfOuterClass.Token getDefaultInstanceForType() { + return org.apache.arrow.flight.perf.impl.PerfOuterClass.Token.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.flight.perf.impl.PerfOuterClass.Token build() { + org.apache.arrow.flight.perf.impl.PerfOuterClass.Token result = buildPartial(); + if (!result.isInitialized()) { + throw 
newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.flight.perf.impl.PerfOuterClass.Token buildPartial() { + org.apache.arrow.flight.perf.impl.PerfOuterClass.Token result = new org.apache.arrow.flight.perf.impl.PerfOuterClass.Token(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.flight.perf.impl.PerfOuterClass.Token result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.definition_ = definitionBuilder_ == null + ? definition_ + : definitionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.start_ = start_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.end_ = end_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.flight.perf.impl.PerfOuterClass.Token) { + return mergeFrom((org.apache.arrow.flight.perf.impl.PerfOuterClass.Token)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.flight.perf.impl.PerfOuterClass.Token other) { + if (other == org.apache.arrow.flight.perf.impl.PerfOuterClass.Token.getDefaultInstance()) return this; + if (other.hasDefinition()) { + mergeDefinition(other.getDefinition()); + } + if (other.getStart() != 0L) { + setStart(other.getStart()); + } + if (other.getEnd() != 0L) { + setEnd(other.getEnd()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + 
boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getDefinitionFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 16: { + start_ = input.readInt64(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 24: { + end_ = input.readInt64(); + bitField0_ |= 0x00000004; + break; + } // case 24 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf definition_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf, org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.Builder, org.apache.arrow.flight.perf.impl.PerfOuterClass.PerfOrBuilder> definitionBuilder_; + /** + *
+       * definition of entire flight.
+       * 
+ * + * .Perf definition = 1; + * @return Whether the definition field is set. + */ + public boolean hasDefinition() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       * definition of entire flight.
+       * 
+ * + * .Perf definition = 1; + * @return The definition. + */ + public org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf getDefinition() { + if (definitionBuilder_ == null) { + return definition_ == null ? org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.getDefaultInstance() : definition_; + } else { + return definitionBuilder_.getMessage(); + } + } + /** + *
+       * definition of entire flight.
+       * 
+ * + * .Perf definition = 1; + */ + public Builder setDefinition(org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf value) { + if (definitionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + definition_ = value; + } else { + definitionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * definition of entire flight.
+       * 
+ * + * .Perf definition = 1; + */ + public Builder setDefinition( + org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.Builder builderForValue) { + if (definitionBuilder_ == null) { + definition_ = builderForValue.build(); + } else { + definitionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * definition of entire flight.
+       * 
+ * + * .Perf definition = 1; + */ + public Builder mergeDefinition(org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf value) { + if (definitionBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + definition_ != null && + definition_ != org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.getDefaultInstance()) { + getDefinitionBuilder().mergeFrom(value); + } else { + definition_ = value; + } + } else { + definitionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * definition of entire flight.
+       * 
+ * + * .Perf definition = 1; + */ + public Builder clearDefinition() { + bitField0_ = (bitField0_ & ~0x00000001); + definition_ = null; + if (definitionBuilder_ != null) { + definitionBuilder_.dispose(); + definitionBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       * definition of entire flight.
+       * 
+ * + * .Perf definition = 1; + */ + public org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.Builder getDefinitionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getDefinitionFieldBuilder().getBuilder(); + } + /** + *
+       * definition of entire flight.
+       * 
+ * + * .Perf definition = 1; + */ + public org.apache.arrow.flight.perf.impl.PerfOuterClass.PerfOrBuilder getDefinitionOrBuilder() { + if (definitionBuilder_ != null) { + return definitionBuilder_.getMessageOrBuilder(); + } else { + return definition_ == null ? + org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.getDefaultInstance() : definition_; + } + } + /** + *
+       * definition of entire flight.
+       * 
+ * + * .Perf definition = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf, org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.Builder, org.apache.arrow.flight.perf.impl.PerfOuterClass.PerfOrBuilder> + getDefinitionFieldBuilder() { + if (definitionBuilder_ == null) { + definitionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf, org.apache.arrow.flight.perf.impl.PerfOuterClass.Perf.Builder, org.apache.arrow.flight.perf.impl.PerfOuterClass.PerfOrBuilder>( + getDefinition(), + getParentForChildren(), + isClean()); + definition_ = null; + } + return definitionBuilder_; + } + + private long start_ ; + /** + *
+       * inclusive start
+       * 
+ * + * int64 start = 2; + * @return The start. + */ + @java.lang.Override + public long getStart() { + return start_; + } + /** + *
+       * inclusive start
+       * 
+ * + * int64 start = 2; + * @param value The start to set. + * @return This builder for chaining. + */ + public Builder setStart(long value) { + + start_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * inclusive start
+       * 
+ * + * int64 start = 2; + * @return This builder for chaining. + */ + public Builder clearStart() { + bitField0_ = (bitField0_ & ~0x00000002); + start_ = 0L; + onChanged(); + return this; + } + + private long end_ ; + /** + *
+       * exclusive end
+       * 
+ * + * int64 end = 3; + * @return The end. + */ + @java.lang.Override + public long getEnd() { + return end_; + } + /** + *
+       * exclusive end
+       * 
+ * + * int64 end = 3; + * @param value The end to set. + * @return This builder for chaining. + */ + public Builder setEnd(long value) { + + end_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * exclusive end
+       * 
+ * + * int64 end = 3; + * @return This builder for chaining. + */ + public Builder clearEnd() { + bitField0_ = (bitField0_ & ~0x00000004); + end_ = 0L; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:Token) + } + + // @@protoc_insertion_point(class_scope:Token) + private static final org.apache.arrow.flight.perf.impl.PerfOuterClass.Token DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.flight.perf.impl.PerfOuterClass.Token(); + } + + public static org.apache.arrow.flight.perf.impl.PerfOuterClass.Token getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Token parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.flight.perf.impl.PerfOuterClass.Token getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_Perf_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_Perf_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_Token_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_Token_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\nperf.proto\"c\n\004Perf\022\016\n\006schema\030\001 \001(\014\022\024\n\014" + + "stream_count\030\002 \001(\005\022\032\n\022records_per_stream" + + "\030\003 \001(\003\022\031\n\021records_per_batch\030\004 \001(\005\">\n\005Tok" + + "en\022\031\n\ndefinition\030\001 \001(\0132\005.Perf\022\r\n\005start\030\002" + + " \001(\003\022\013\n\003end\030\003 \001(\003B#\n!org.apache.arrow.fl" + + "ight.perf.implb\006proto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }); + internal_static_Perf_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_Perf_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Perf_descriptor, + new java.lang.String[] { "Schema", "StreamCount", "RecordsPerStream", "RecordsPerBatch", }); + internal_static_Token_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_Token_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Token_descriptor, + new java.lang.String[] { "Definition", "Start", "End", }); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/binlog/v1/binarylog.proto b/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/binlog/v1/binarylog.proto new file mode 100644 index 000000000000..9ed1733e2d88 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/binlog/v1/binarylog.proto @@ -0,0 +1,209 @@ +// Copyright 2018 The gRPC Authors +// All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// The canonical version of this proto can be found at +// https://github.com/grpc/grpc-proto/blob/master/grpc/binlog/v1/binarylog.proto + +syntax = "proto3"; + +package grpc.binarylog.v1; + +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/grpc/binarylog/grpc_binarylog_v1"; +option java_multiple_files = true; +option java_package = "io.grpc.binarylog.v1"; +option java_outer_classname = "BinaryLogProto"; + +// Log entry we store in binary logs +message GrpcLogEntry { + // Enumerates the type of event + // Note the terminology is different from the RPC semantics + // definition, but the same meaning is expressed here. 
+ enum EventType { + EVENT_TYPE_UNKNOWN = 0; + // Header sent from client to server + EVENT_TYPE_CLIENT_HEADER = 1; + // Header sent from server to client + EVENT_TYPE_SERVER_HEADER = 2; + // Message sent from client to server + EVENT_TYPE_CLIENT_MESSAGE = 3; + // Message sent from server to client + EVENT_TYPE_SERVER_MESSAGE = 4; + // A signal that client is done sending + EVENT_TYPE_CLIENT_HALF_CLOSE = 5; + // Trailer indicates the end of the RPC. + // On client side, this event means a trailer was either received + // from the network or the gRPC library locally generated a status + // to inform the application about a failure. + // On server side, this event means the server application requested + // to send a trailer. Note: EVENT_TYPE_CANCEL may still arrive after + // this due to races on server side. + EVENT_TYPE_SERVER_TRAILER = 6; + // A signal that the RPC is cancelled. On client side, this + // indicates the client application requests a cancellation. + // On server side, this indicates that cancellation was detected. + // Note: This marks the end of the RPC. Events may arrive after + // this due to races. For example, on client side a trailer + // may arrive even though the application requested to cancel the RPC. + EVENT_TYPE_CANCEL = 7; + } + + // Enumerates the entity that generates the log entry + enum Logger { + LOGGER_UNKNOWN = 0; + LOGGER_CLIENT = 1; + LOGGER_SERVER = 2; + } + + // The timestamp of the binary log message + google.protobuf.Timestamp timestamp = 1; + + // Uniquely identifies a call. The value must not be 0 in order to disambiguate + // from an unset value. + // Each call may have several log entries, they will all have the same call_id. + // Nothing is guaranteed about their value other than they are unique across + // different RPCs in the same gRPC process. + uint64 call_id = 2; + + // The entry sequence id for this call. The first GrpcLogEntry has a + // value of 1, to disambiguate from an unset value. 
The purpose of + // this field is to detect missing entries in environments where + // durability or ordering is not guaranteed. + uint64 sequence_id_within_call = 3; + + EventType type = 4; + Logger logger = 5; // One of the above Logger enum + + // The logger uses one of the following fields to record the payload, + // according to the type of the log entry. + oneof payload { + ClientHeader client_header = 6; + ServerHeader server_header = 7; + // Used by EVENT_TYPE_CLIENT_MESSAGE, EVENT_TYPE_SERVER_MESSAGE + Message message = 8; + Trailer trailer = 9; + } + + // true if payload does not represent the full message or metadata. + bool payload_truncated = 10; + + // Peer address information, will only be recorded on the first + // incoming event. On client side, peer is logged on + // EVENT_TYPE_SERVER_HEADER normally or EVENT_TYPE_SERVER_TRAILER in + // the case of trailers-only. On server side, peer is always + // logged on EVENT_TYPE_CLIENT_HEADER. + Address peer = 11; +}; + +message ClientHeader { + // This contains only the metadata from the application. + Metadata metadata = 1; + + // The name of the RPC method, which looks something like: + // // + // Note the leading "/" character. + string method_name = 2; + + // A single process may be used to run multiple virtual + // servers with different identities. + // The authority is the name of such a server identitiy. + // It is typically a portion of the URI in the form of + // or : . + string authority = 3; + + // the RPC timeout + google.protobuf.Duration timeout = 4; +} + +message ServerHeader { + // This contains only the metadata from the application. + Metadata metadata = 1; +} + +message Trailer { + // This contains only the metadata from the application. + Metadata metadata = 1; + + // The gRPC status code. + uint32 status_code = 2; + + // An original status message before any transport specific + // encoding. + string status_message = 3; + + // The value of the 'grpc-status-details-bin' metadata key. 
If + // present, this is always an encoded 'google.rpc.Status' message. + bytes status_details = 4; +} + +// Message payload, used by CLIENT_MESSAGE and SERVER_MESSAGE +message Message { + // Length of the message. It may not be the same as the length of the + // data field, as the logging payload can be truncated or omitted. + uint32 length = 1; + // May be truncated or omitted. + bytes data = 2; +} + +// A list of metadata pairs, used in the payload of client header, +// server header, and server trailer. +// Implementations may omit some entries to honor the header limits +// of GRPC_BINARY_LOG_CONFIG. +// +// Header keys added by gRPC are omitted. To be more specific, +// implementations will not log the following entries, and this is +// not to be treated as a truncation: +// - entries handled by grpc that are not user visible, such as those +// that begin with 'grpc-' (with exception of grpc-trace-bin) +// or keys like 'lb-token' +// - transport specific entries, including but not limited to: +// ':path', ':authority', 'content-encoding', 'user-agent', 'te', etc +// - entries added for call credentials +// +// Implementations must always log grpc-trace-bin if it is present. +// Practically speaking it will only be visible on server side because +// grpc-trace-bin is managed by low level client side mechanisms +// inaccessible from the application level. On server side, the +// header is just a normal metadata key. +// The pair will not count towards the size limit. +message Metadata { + repeated MetadataEntry entry = 1; +} + +// A metadata key value pair +message MetadataEntry { + string key = 1; + bytes value = 2; +} + +// Address information +message Address { + enum Type { + TYPE_UNKNOWN = 0; + // address is in 1.2.3.4 form + TYPE_IPV4 = 1; + // address is in IPv6 canonical form (RFC5952 section 4) + // The scope is NOT included in the address string. 
+ TYPE_IPV6 = 2; + // address is UDS string + TYPE_UNIX = 3; + }; + Type type = 1; + string address = 2; + // only for TYPE_IPV4 and TYPE_IPV6 + uint32 ip_port = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/channelz/v1/channelz.proto b/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/channelz/v1/channelz.proto new file mode 100644 index 000000000000..d0781094ea8b --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/channelz/v1/channelz.proto @@ -0,0 +1,564 @@ +// Copyright 2018 The gRPC Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file defines an interface for exporting monitoring information +// out of gRPC servers. 
See the full design at +// https://github.com/grpc/proposal/blob/master/A14-channelz.md +// +// The canonical version of this proto can be found at +// https://github.com/grpc/grpc-proto/blob/master/grpc/channelz/v1/channelz.proto + +syntax = "proto3"; + +package grpc.channelz.v1; + +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; + +option go_package = "google.golang.org/grpc/channelz/grpc_channelz_v1"; +option java_multiple_files = true; +option java_package = "io.grpc.channelz.v1"; +option java_outer_classname = "ChannelzProto"; + +// Channel is a logical grouping of channels, subchannels, and sockets. +message Channel { + // The identifier for this channel. This should be set. + ChannelRef ref = 1; + // Data specific to this channel. + ChannelData data = 2; + // At most one of 'channel_ref+subchannel_ref' and 'socket' is set. + + // There are no ordering guarantees on the order of channel refs. + // There may not be cycles in the ref graph. + // A channel ref may be present in more than one channel or subchannel. + repeated ChannelRef channel_ref = 3; + + // At most one of 'channel_ref+subchannel_ref' and 'socket' is set. + // There are no ordering guarantees on the order of subchannel refs. + // There may not be cycles in the ref graph. + // A sub channel ref may be present in more than one channel or subchannel. + repeated SubchannelRef subchannel_ref = 4; + + // There are no ordering guarantees on the order of sockets. + repeated SocketRef socket_ref = 5; +} + +// Subchannel is a logical grouping of channels, subchannels, and sockets. +// A subchannel is load balanced over by it's ancestor +message Subchannel { + // The identifier for this channel. + SubchannelRef ref = 1; + // Data specific to this channel. + ChannelData data = 2; + // At most one of 'channel_ref+subchannel_ref' and 'socket' is set. 
+ + // There are no ordering guarantees on the order of channel refs. + // There may not be cycles in the ref graph. + // A channel ref may be present in more than one channel or subchannel. + repeated ChannelRef channel_ref = 3; + + // At most one of 'channel_ref+subchannel_ref' and 'socket' is set. + // There are no ordering guarantees on the order of subchannel refs. + // There may not be cycles in the ref graph. + // A sub channel ref may be present in more than one channel or subchannel. + repeated SubchannelRef subchannel_ref = 4; + + // There are no ordering guarantees on the order of sockets. + repeated SocketRef socket_ref = 5; +} + +// These come from the specified states in this document: +// https://github.com/grpc/grpc/blob/master/doc/connectivity-semantics-and-api.md +message ChannelConnectivityState { + enum State { + UNKNOWN = 0; + IDLE = 1; + CONNECTING = 2; + READY = 3; + TRANSIENT_FAILURE = 4; + SHUTDOWN = 5; + } + State state = 1; +} + +// Channel data is data related to a specific Channel or Subchannel. +message ChannelData { + // The connectivity state of the channel or subchannel. Implementations + // should always set this. + ChannelConnectivityState state = 1; + + // The target this channel originally tried to connect to. May be absent + string target = 2; + + // A trace of recent events on the channel. May be absent. + ChannelTrace trace = 3; + + // The number of calls started on the channel + int64 calls_started = 4; + // The number of calls that have completed with an OK status + int64 calls_succeeded = 5; + // The number of calls that have completed with a non-OK status + int64 calls_failed = 6; + + // The last time a call was started on the channel. + google.protobuf.Timestamp last_call_started_timestamp = 7; +} + +// A trace event is an interesting thing that happened to a channel or +// subchannel, such as creation, address resolution, subchannel creation, etc. +message ChannelTraceEvent { + // High level description of the event. 
+ string description = 1; + // The supported severity levels of trace events. + enum Severity { + CT_UNKNOWN = 0; + CT_INFO = 1; + CT_WARNING = 2; + CT_ERROR = 3; + } + // the severity of the trace event + Severity severity = 2; + // When this event occurred. + google.protobuf.Timestamp timestamp = 3; + // ref of referenced channel or subchannel. + // Optional, only present if this event refers to a child object. For example, + // this field would be filled if this trace event was for a subchannel being + // created. + oneof child_ref { + ChannelRef channel_ref = 4; + SubchannelRef subchannel_ref = 5; + } +} + +// ChannelTrace represents the recent events that have occurred on the channel. +message ChannelTrace { + // Number of events ever logged in this tracing object. This can differ from + // events.size() because events can be overwritten or garbage collected by + // implementations. + int64 num_events_logged = 1; + // Time that this channel was created. + google.protobuf.Timestamp creation_timestamp = 2; + // List of events that have occurred on this channel. + repeated ChannelTraceEvent events = 3; +} + +// ChannelRef is a reference to a Channel. +message ChannelRef { + // The globally unique id for this channel. Must be a positive number. + int64 channel_id = 1; + // An optional name associated with the channel. + string name = 2; + // Intentionally don't use field numbers from other refs. + reserved 3, 4, 5, 6, 7, 8; +} + +// SubchannelRef is a reference to a Subchannel. +message SubchannelRef { + // The globally unique id for this subchannel. Must be a positive number. + int64 subchannel_id = 7; + // An optional name associated with the subchannel. + string name = 8; + // Intentionally don't use field numbers from other refs. + reserved 1, 2, 3, 4, 5, 6; +} + +// SocketRef is a reference to a Socket. +message SocketRef { + // The globally unique id for this socket. Must be a positive number. 
+ int64 socket_id = 3; + // An optional name associated with the socket. + string name = 4; + // Intentionally don't use field numbers from other refs. + reserved 1, 2, 5, 6, 7, 8; +} + +// ServerRef is a reference to a Server. +message ServerRef { + // A globally unique identifier for this server. Must be a positive number. + int64 server_id = 5; + // An optional name associated with the server. + string name = 6; + // Intentionally don't use field numbers from other refs. + reserved 1, 2, 3, 4, 7, 8; +} + +// Server represents a single server. There may be multiple servers in a single +// program. +message Server { + // The identifier for a Server. This should be set. + ServerRef ref = 1; + // The associated data of the Server. + ServerData data = 2; + + // The sockets that the server is listening on. There are no ordering + // guarantees. This may be absent. + repeated SocketRef listen_socket = 3; +} + +// ServerData is data for a specific Server. +message ServerData { + // A trace of recent events on the server. May be absent. + ChannelTrace trace = 1; + + // The number of incoming calls started on the server + int64 calls_started = 2; + // The number of incoming calls that have completed with an OK status + int64 calls_succeeded = 3; + // The number of incoming calls that have a completed with a non-OK status + int64 calls_failed = 4; + + // The last time a call was started on the server. + google.protobuf.Timestamp last_call_started_timestamp = 5; +} + +// Information about an actual connection. Pronounced "sock-ay". +message Socket { + // The identifier for the Socket. + SocketRef ref = 1; + + // Data specific to this Socket. + SocketData data = 2; + // The locally bound address. + Address local = 3; + // The remote bound address. May be absent. + Address remote = 4; + // Security details for this socket. May be absent if not available, or + // there is no security on the socket. 
+ Security security = 5; + + // Optional, represents the name of the remote endpoint, if different than + // the original target name. + string remote_name = 6; +} + +// SocketData is data associated for a specific Socket. The fields present +// are specific to the implementation, so there may be minor differences in +// the semantics. (e.g. flow control windows) +message SocketData { + // The number of streams that have been started. + int64 streams_started = 1; + // The number of streams that have ended successfully: + // On client side, received frame with eos bit set; + // On server side, sent frame with eos bit set. + int64 streams_succeeded = 2; + // The number of streams that have ended unsuccessfully: + // On client side, ended without receiving frame with eos bit set; + // On server side, ended without sending frame with eos bit set. + int64 streams_failed = 3; + // The number of grpc messages successfully sent on this socket. + int64 messages_sent = 4; + // The number of grpc messages received on this socket. + int64 messages_received = 5; + + // The number of keep alives sent. This is typically implemented with HTTP/2 + // ping messages. + int64 keep_alives_sent = 6; + + // The last time a stream was created by this endpoint. Usually unset for + // servers. + google.protobuf.Timestamp last_local_stream_created_timestamp = 7; + // The last time a stream was created by the remote endpoint. Usually unset + // for clients. + google.protobuf.Timestamp last_remote_stream_created_timestamp = 8; + + // The last time a message was sent by this endpoint. + google.protobuf.Timestamp last_message_sent_timestamp = 9; + // The last time a message was received by this endpoint. + google.protobuf.Timestamp last_message_received_timestamp = 10; + + // The amount of window, granted to the local endpoint by the remote endpoint. + // This may be slightly out of date due to network latency. This does NOT + // include stream level or TCP level flow control info. 
+ google.protobuf.Int64Value local_flow_control_window = 11; + + // The amount of window, granted to the remote endpoint by the local endpoint. + // This may be slightly out of date due to network latency. This does NOT + // include stream level or TCP level flow control info. + google.protobuf.Int64Value remote_flow_control_window = 12; + + // Socket options set on this socket. May be absent if 'summary' is set + // on GetSocketRequest. + repeated SocketOption option = 13; +} + +// Address represents the address used to create the socket. +message Address { + message TcpIpAddress { + // Either the IPv4 or IPv6 address in bytes. Will be either 4 bytes or 16 + // bytes in length. + bytes ip_address = 1; + // 0-64k, or -1 if not appropriate. + int32 port = 2; + } + // A Unix Domain Socket address. + message UdsAddress { + string filename = 1; + } + // An address type not included above. + message OtherAddress { + // The human readable version of the value. This value should be set. + string name = 1; + // The actual address message. + google.protobuf.Any value = 2; + } + + oneof address { + TcpIpAddress tcpip_address = 1; + UdsAddress uds_address = 2; + OtherAddress other_address = 3; + } +} + +// Security represents details about how secure the socket is. +message Security { + message Tls { + oneof cipher_suite { + // The cipher suite name in the RFC 4346 format: + // https://tools.ietf.org/html/rfc4346#appendix-C + string standard_name = 1; + // Some other way to describe the cipher suite if + // the RFC 4346 name is not available. + string other_name = 2; + } + // the certificate used by this endpoint. + bytes local_certificate = 3; + // the certificate used by the remote endpoint. + bytes remote_certificate = 4; + } + message OtherSecurity { + // The human readable version of the value. + string name = 1; + // The actual security details message. 
+ google.protobuf.Any value = 2; + } + oneof model { + Tls tls = 1; + OtherSecurity other = 2; + } +} + +// SocketOption represents socket options for a socket. Specifically, these +// are the options returned by getsockopt(). +message SocketOption { + // The full name of the socket option. Typically this will be the upper case + // name, such as "SO_REUSEPORT". + string name = 1; + // The human readable value of this socket option. At least one of value or + // additional will be set. + string value = 2; + // Additional data associated with the socket option. At least one of value + // or additional will be set. + google.protobuf.Any additional = 3; +} + +// For use with SocketOption's additional field. This is primarily used for +// SO_RCVTIMEO and SO_SNDTIMEO +message SocketOptionTimeout { + google.protobuf.Duration duration = 1; +} + +// For use with SocketOption's additional field. This is primarily used for +// SO_LINGER. +message SocketOptionLinger { + // active maps to `struct linger.l_onoff` + bool active = 1; + // duration maps to `struct linger.l_linger` + google.protobuf.Duration duration = 2; +} + +// For use with SocketOption's additional field. Tcp info for +// SOL_TCP and TCP_INFO. 
+message SocketOptionTcpInfo { + uint32 tcpi_state = 1; + + uint32 tcpi_ca_state = 2; + uint32 tcpi_retransmits = 3; + uint32 tcpi_probes = 4; + uint32 tcpi_backoff = 5; + uint32 tcpi_options = 6; + uint32 tcpi_snd_wscale = 7; + uint32 tcpi_rcv_wscale = 8; + + uint32 tcpi_rto = 9; + uint32 tcpi_ato = 10; + uint32 tcpi_snd_mss = 11; + uint32 tcpi_rcv_mss = 12; + + uint32 tcpi_unacked = 13; + uint32 tcpi_sacked = 14; + uint32 tcpi_lost = 15; + uint32 tcpi_retrans = 16; + uint32 tcpi_fackets = 17; + + uint32 tcpi_last_data_sent = 18; + uint32 tcpi_last_ack_sent = 19; + uint32 tcpi_last_data_recv = 20; + uint32 tcpi_last_ack_recv = 21; + + uint32 tcpi_pmtu = 22; + uint32 tcpi_rcv_ssthresh = 23; + uint32 tcpi_rtt = 24; + uint32 tcpi_rttvar = 25; + uint32 tcpi_snd_ssthresh = 26; + uint32 tcpi_snd_cwnd = 27; + uint32 tcpi_advmss = 28; + uint32 tcpi_reordering = 29; +} + +// Channelz is a service exposed by gRPC servers that provides detailed debug +// information. +service Channelz { + // Gets all root channels (i.e. channels the application has directly + // created). This does not include subchannels nor non-top level channels. + rpc GetTopChannels(GetTopChannelsRequest) returns (GetTopChannelsResponse); + // Gets all servers that exist in the process. + rpc GetServers(GetServersRequest) returns (GetServersResponse); + // Returns a single Server, or else a NOT_FOUND code. + rpc GetServer(GetServerRequest) returns (GetServerResponse); + // Gets all server sockets that exist in the process. + rpc GetServerSockets(GetServerSocketsRequest) returns (GetServerSocketsResponse); + // Returns a single Channel, or else a NOT_FOUND code. + rpc GetChannel(GetChannelRequest) returns (GetChannelResponse); + // Returns a single Subchannel, or else a NOT_FOUND code. + rpc GetSubchannel(GetSubchannelRequest) returns (GetSubchannelResponse); + // Returns a single Socket or else a NOT_FOUND code. 
+ rpc GetSocket(GetSocketRequest) returns (GetSocketResponse); +} + +message GetTopChannelsRequest { + // start_channel_id indicates that only channels at or above this id should be + // included in the results. + // To request the first page, this should be set to 0. To request + // subsequent pages, the client generates this value by adding 1 to + // the highest seen result ID. + int64 start_channel_id = 1; + + // If non-zero, the server will return a page of results containing + // at most this many items. If zero, the server will choose a + // reasonable page size. Must never be negative. + int64 max_results = 2; +} + +message GetTopChannelsResponse { + // list of channels that the connection detail service knows about. Sorted in + // ascending channel_id order. + // Must contain at least 1 result, otherwise 'end' must be true. + repeated Channel channel = 1; + // If set, indicates that the list of channels is the final list. Requesting + // more channels can only return more if they are created after this RPC + // completes. + bool end = 2; +} + +message GetServersRequest { + // start_server_id indicates that only servers at or above this id should be + // included in the results. + // To request the first page, this must be set to 0. To request + // subsequent pages, the client generates this value by adding 1 to + // the highest seen result ID. + int64 start_server_id = 1; + + // If non-zero, the server will return a page of results containing + // at most this many items. If zero, the server will choose a + // reasonable page size. Must never be negative. + int64 max_results = 2; +} + +message GetServersResponse { + // list of servers that the connection detail service knows about. Sorted in + // ascending server_id order. + // Must contain at least 1 result, otherwise 'end' must be true. + repeated Server server = 1; + // If set, indicates that the list of servers is the final list. 
Requesting + // more servers will only return more if they are created after this RPC + // completes. + bool end = 2; +} + +message GetServerRequest { + // server_id is the identifier of the specific server to get. + int64 server_id = 1; +} + +message GetServerResponse { + // The Server that corresponds to the requested server_id. This field + // should be set. + Server server = 1; +} + +message GetServerSocketsRequest { + int64 server_id = 1; + // start_socket_id indicates that only sockets at or above this id should be + // included in the results. + // To request the first page, this must be set to 0. To request + // subsequent pages, the client generates this value by adding 1 to + // the highest seen result ID. + int64 start_socket_id = 2; + + // If non-zero, the server will return a page of results containing + // at most this many items. If zero, the server will choose a + // reasonable page size. Must never be negative. + int64 max_results = 3; +} + +message GetServerSocketsResponse { + // list of socket refs that the connection detail service knows about. Sorted in + // ascending socket_id order. + // Must contain at least 1 result, otherwise 'end' must be true. + repeated SocketRef socket_ref = 1; + // If set, indicates that the list of sockets is the final list. Requesting + // more sockets will only return more if they are created after this RPC + // completes. + bool end = 2; +} + +message GetChannelRequest { + // channel_id is the identifier of the specific channel to get. + int64 channel_id = 1; +} + +message GetChannelResponse { + // The Channel that corresponds to the requested channel_id. This field + // should be set. + Channel channel = 1; +} + +message GetSubchannelRequest { + // subchannel_id is the identifier of the specific subchannel to get. + int64 subchannel_id = 1; +} + +message GetSubchannelResponse { + // The Subchannel that corresponds to the requested subchannel_id. This + // field should be set. 
+ Subchannel subchannel = 1; +} + +message GetSocketRequest { + // socket_id is the identifier of the specific socket to get. + int64 socket_id = 1; + + // If true, the response will contain only high level information + // that is inexpensive to obtain. Fields thay may be omitted are + // documented. + bool summary = 2; +} + +message GetSocketResponse { + // The Socket that corresponds to the requested socket_id. This field + // should be set. + Socket socket = 1; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/health/v1/health.proto b/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/health/v1/health.proto new file mode 100644 index 000000000000..38843ff1e73a --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/health/v1/health.proto @@ -0,0 +1,63 @@ +// Copyright 2015 The gRPC Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// The canonical version of this proto can be found at +// https://github.com/grpc/grpc-proto/blob/master/grpc/health/v1/health.proto + +syntax = "proto3"; + +package grpc.health.v1; + +option csharp_namespace = "Grpc.Health.V1"; +option go_package = "google.golang.org/grpc/health/grpc_health_v1"; +option java_multiple_files = true; +option java_outer_classname = "HealthProto"; +option java_package = "io.grpc.health.v1"; + +message HealthCheckRequest { + string service = 1; +} + +message HealthCheckResponse { + enum ServingStatus { + UNKNOWN = 0; + SERVING = 1; + NOT_SERVING = 2; + SERVICE_UNKNOWN = 3; // Used only by the Watch method. + } + ServingStatus status = 1; +} + +service Health { + // If the requested service is unknown, the call will fail with status + // NOT_FOUND. + rpc Check(HealthCheckRequest) returns (HealthCheckResponse); + + // Performs a watch for the serving status of the requested service. + // The server will immediately send back a message indicating the current + // serving status. It will then subsequently send a new message whenever + // the service's serving status changes. + // + // If the requested service is unknown when the call is received, the + // server will send a message setting the serving status to + // SERVICE_UNKNOWN but will *not* terminate the call. If at some + // future point, the serving status of the service becomes known, the + // server will send a new message with the service's serving status. + // + // If the call terminates with status UNIMPLEMENTED, then clients + // should assume this method is not supported and should not retry the + // call. If the call terminates with any other status (including OK), + // clients should retry the call with appropriate exponential backoff. 
+ rpc Watch(HealthCheckRequest) returns (stream HealthCheckResponse); +} diff --git a/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/reflection/v1alpha/reflection.proto b/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/reflection/v1alpha/reflection.proto new file mode 100644 index 000000000000..8c5e06fe1485 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/a8f2bff45a301f3998ee512797f97f9e/grpc/reflection/v1alpha/reflection.proto @@ -0,0 +1,144 @@ +// Copyright 2016 The gRPC Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// Service exported by server reflection + + +// Warning: this entire file is deprecated. Use this instead: +// https://github.com/grpc/grpc-proto/blob/master/grpc/reflection/v1/reflection.proto + +syntax = "proto3"; + +package grpc.reflection.v1alpha; + +option deprecated = true; +option java_multiple_files = true; +option java_package = "io.grpc.reflection.v1alpha"; +option java_outer_classname = "ServerReflectionProto"; + +service ServerReflection { + // The reflection service is structured as a bidirectional stream, ensuring + // all related requests go to a single server. + rpc ServerReflectionInfo(stream ServerReflectionRequest) + returns (stream ServerReflectionResponse); +} + +// The message sent by the client when calling ServerReflectionInfo method. 
+message ServerReflectionRequest { + string host = 1; + // To use reflection service, the client should set one of the following + // fields in message_request. The server distinguishes requests by their + // defined field and then handles them using corresponding methods. + oneof message_request { + // Find a proto file by the file name. + string file_by_filename = 3; + + // Find the proto file that declares the given fully-qualified symbol name. + // This field should be a fully-qualified symbol name + // (e.g. .[.] or .). + string file_containing_symbol = 4; + + // Find the proto file which defines an extension extending the given + // message type with the given field number. + ExtensionRequest file_containing_extension = 5; + + // Finds the tag numbers used by all known extensions of extendee_type, and + // appends them to ExtensionNumberResponse in an undefined order. + // Its corresponding method is best-effort: it's not guaranteed that the + // reflection service will implement this method, and it's not guaranteed + // that this method will provide all extensions. Returns + // StatusCode::UNIMPLEMENTED if it's not implemented. + // This field should be a fully-qualified type name. The format is + // . + string all_extension_numbers_of_type = 6; + + // List the full names of registered services. The content will not be + // checked. + string list_services = 7; + } +} + +// The type name and extension number sent by the client when requesting +// file_containing_extension. +message ExtensionRequest { + // Fully-qualified type name. The format should be . + string containing_type = 1; + int32 extension_number = 2; +} + +// The message sent by the server to answer ServerReflectionInfo method. +message ServerReflectionResponse { + string valid_host = 1; + ServerReflectionRequest original_request = 2; + // The server set one of the following fields accroding to the message_request + // in the request. 
+ oneof message_response { + // This message is used to answer file_by_filename, file_containing_symbol, + // file_containing_extension requests with transitive dependencies. As + // the repeated label is not allowed in oneof fields, we use a + // FileDescriptorResponse message to encapsulate the repeated fields. + // The reflection service is allowed to avoid sending FileDescriptorProtos + // that were previously sent in response to earlier requests in the stream. + FileDescriptorResponse file_descriptor_response = 4; + + // This message is used to answer all_extension_numbers_of_type requst. + ExtensionNumberResponse all_extension_numbers_response = 5; + + // This message is used to answer list_services request. + ListServiceResponse list_services_response = 6; + + // This message is used when an error occurs. + ErrorResponse error_response = 7; + } +} + +// Serialized FileDescriptorProto messages sent by the server answering +// a file_by_filename, file_containing_symbol, or file_containing_extension +// request. +message FileDescriptorResponse { + // Serialized FileDescriptorProto messages. We avoid taking a dependency on + // descriptor.proto, which uses proto2 only features, by making them opaque + // bytes instead. + repeated bytes file_descriptor_proto = 1; +} + +// A list of extension numbers sent by the server answering +// all_extension_numbers_of_type request. +message ExtensionNumberResponse { + // Full name of the base type, including the package name. The format + // is . + string base_type_name = 1; + repeated int32 extension_number = 2; +} + +// A list of ServiceResponse sent by the server answering list_services request. +message ListServiceResponse { + // The information of each service may be expanded in the future, so we use + // ServiceResponse message to encapsulate it. + repeated ServiceResponse service = 1; +} + +// The information of a single service used by ListServiceResponse to answer +// list_services request. 
+message ServiceResponse { + // Full name of a registered service, including its package name. The format + // is . + string name = 1; +} + +// The error code and error message sent by the server when an error occurs. +message ErrorResponse { + // This field uses the error codes defined in grpc::StatusCode. + int32 error_code = 1; + string error_message = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/any.proto b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/any.proto new file mode 100644 index 000000000000..c89243178afa --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/any.proto @@ -0,0 +1,161 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/known/anypb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "AnyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// `Any` contains an arbitrary serialized protocol buffer message along with a +// URL that describes the type of the serialized message. +// +// Protobuf library provides support to pack/unpack Any values in the form +// of utility functions or additional generated methods of the Any type. +// +// Example 1: Pack and unpack a message in C++. +// +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } +// +// Example 2: Pack and unpack a message in Java. +// +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... 
+// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } +// +// The pack methods provided by protobuf library will by default use +// 'type.googleapis.com/full.type.name' as the type URL and the unpack +// methods only use the fully qualified type name after the last '/' +// in the type URL, for example "foo.bar.com/x/y.z" will yield type +// name "y.z". +// +// JSON +// ==== +// The JSON representation of an `Any` value uses the regular +// representation of the deserialized, embedded message, with an +// additional field `@type` which contains the type URL. Example: +// +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } +// +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } +// +// If the embedded message type is well-known and has a custom JSON +// representation, that representation will be embedded adding a field +// `value` which holds the custom JSON in addition to the `@type` +// field. Example (for message [google.protobuf.Duration][]): +// +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// +message Any { + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. 
+ // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + // + string type_url = 1; + + // Must be a valid serialized protocol buffer of the above specified type. + bytes value = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/api.proto b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/api.proto new file mode 100644 index 000000000000..422235167018 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/api.proto @@ -0,0 +1,207 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +import "google/protobuf/source_context.proto"; +import "google/protobuf/type.proto"; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "ApiProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/apipb"; + +// Api is a light-weight descriptor for an API Interface. +// +// Interfaces are also described as "protocol buffer services" in some contexts, +// such as by the "service" keyword in a .proto file, but they are different +// from API Services, which represent a concrete implementation of an interface +// as opposed to simply a description of methods and bindings. They are also +// sometimes simply referred to as "APIs" in other contexts, such as the name of +// this message itself. See https://cloud.google.com/apis/design/glossary for +// detailed terminology. 
+message Api { + // The fully qualified name of this interface, including package name + // followed by the interface's simple name. + string name = 1; + + // The methods of this interface, in unspecified order. + repeated Method methods = 2; + + // Any metadata attached to the interface. + repeated Option options = 3; + + // A version string for this interface. If specified, must have the form + // `major-version.minor-version`, as in `1.10`. If the minor version is + // omitted, it defaults to zero. If the entire version field is empty, the + // major version is derived from the package name, as outlined below. If the + // field is not empty, the version in the package name will be verified to be + // consistent with what is provided here. + // + // The versioning schema uses [semantic + // versioning](http://semver.org) where the major version number + // indicates a breaking change and the minor version an additive, + // non-breaking change. Both version numbers are signals to users + // what to expect from different versions, and should be carefully + // chosen based on the product plan. + // + // The major version is also reflected in the package name of the + // interface, which must end in `v`, as in + // `google.feature.v1`. For major versions 0 and 1, the suffix can + // be omitted. Zero major versions must only be used for + // experimental, non-GA interfaces. + // + string version = 4; + + // Source context for the protocol buffer service represented by this + // message. + SourceContext source_context = 5; + + // Included interfaces. See [Mixin][]. + repeated Mixin mixins = 6; + + // The source syntax of the service. + Syntax syntax = 7; +} + +// Method represents a method of an API interface. +message Method { + // The simple name of this method. + string name = 1; + + // A URL of the input message type. + string request_type_url = 2; + + // If true, the request is streamed. + bool request_streaming = 3; + + // The URL of the output message type. 
+ string response_type_url = 4; + + // If true, the response is streamed. + bool response_streaming = 5; + + // Any metadata attached to the method. + repeated Option options = 6; + + // The source syntax of this method. + Syntax syntax = 7; +} + +// Declares an API Interface to be included in this interface. The including +// interface must redeclare all the methods from the included interface, but +// documentation and options are inherited as follows: +// +// - If after comment and whitespace stripping, the documentation +// string of the redeclared method is empty, it will be inherited +// from the original method. +// +// - Each annotation belonging to the service config (http, +// visibility) which is not set in the redeclared method will be +// inherited. +// +// - If an http annotation is inherited, the path pattern will be +// modified as follows. Any version prefix will be replaced by the +// version of the including interface plus the [root][] path if +// specified. +// +// Example of a simple mixin: +// +// package google.acl.v1; +// service AccessControl { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v1/{resource=**}:getAcl"; +// } +// } +// +// package google.storage.v2; +// service Storage { +// rpc GetAcl(GetAclRequest) returns (Acl); +// +// // Get a data record. +// rpc GetData(GetDataRequest) returns (Data) { +// option (google.api.http).get = "/v2/{resource=**}"; +// } +// } +// +// Example of a mixin configuration: +// +// apis: +// - name: google.storage.v2.Storage +// mixins: +// - name: google.acl.v1.AccessControl +// +// The mixin construct implies that all methods in `AccessControl` are +// also declared with same name and request/response types in +// `Storage`. 
A documentation generator or annotation processor will +// see the effective `Storage.GetAcl` method after inherting +// documentation and annotations as follows: +// +// service Storage { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v2/{resource=**}:getAcl"; +// } +// ... +// } +// +// Note how the version in the path pattern changed from `v1` to `v2`. +// +// If the `root` field in the mixin is specified, it should be a +// relative path under which inherited HTTP paths are placed. Example: +// +// apis: +// - name: google.storage.v2.Storage +// mixins: +// - name: google.acl.v1.AccessControl +// root: acls +// +// This implies the following inherited HTTP annotation: +// +// service Storage { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; +// } +// ... +// } +message Mixin { + // The fully qualified name of the interface which is included. + string name = 1; + + // If non-empty specifies a path under which inherited HTTP paths + // are rooted. + string root = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/descriptor.proto b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/descriptor.proto new file mode 100644 index 000000000000..513b3b6f8134 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/descriptor.proto @@ -0,0 +1,1028 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). 
+ +syntax = "proto2"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/descriptorpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2", "proto3", and "editions". + // + // If `edition` is present, this value must be "editions". 
+ optional string syntax = 12; + + // The edition of the proto file, which is an opaque string. + optional string edition = 13; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. + repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + message Declaration { + // The extension number declared within the extension range. + optional int32 number = 1; + + // The fully-qualified name of the extension field. There must be a leading + // dot in front of the full name. + optional string full_name = 2; + + // The fully-qualified type name of the extension field. Unlike + // Metadata.type, Declaration.type must have a leading dot for messages + // and enums. + optional string type = 3; + + // Deprecated. Please use "repeated". 
+ optional bool is_repeated = 4 [deprecated = true]; + + // If true, indicates that the number is reserved in the extension range, + // and any extension field with the number will fail to compile. Set this + // when a declared extension field is deleted. + optional bool reserved = 5; + + // If true, indicates that the extension must be defined as repeated. + // Otherwise the extension must be defined as optional. + optional bool repeated = 6; + } + + // go/protobuf-stripping-extension-declarations + // Like Metadata, but we use a repeated field to hold all extension + // declarations. This should avoid the size increases of transforming a large + // extension range into small ranges in generated binaries. + repeated Declaration declaration = 2 [retention = RETENTION_SOURCE]; + + // The verification state of the extension range. + enum VerificationState { + // All the extensions of the range must be declared. + DECLARATION = 0; + UNVERIFIED = 1; + } + + // The verification state of the range. + // TODO(b/278783756): flip the default to DECLARATION once all empty ranges + // are marked as UNVERIFIED. + optional VerificationState verification = 3 [default = UNVERIFIED]; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported in proto3. 
However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + } + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REQUIRED = 2; + LABEL_REPEATED = 3; + } + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. 
Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; + + // If true, this is a proto3 "optional". When a proto3 field is optional, it + // tracks presence regardless of field type. + // + // When proto3_optional is true, this field must be belong to a oneof to + // signal to old proto3 clients that presence is tracked for this field. This + // oneof is known as a "synthetic" oneof, and this field must be its sole + // member (each proto3 optional field gets its own synthetic oneof). Synthetic + // oneofs exist in the descriptor only, and do not generate any API. Synthetic + // oneofs must be ordered after all "real" oneofs. + // + // For message fields, proto3_optional doesn't create any semantic change, + // since non-repeated message fields always track presence. However it still + // indicates the semantic detail of whether the user wrote "optional" or not. + // This can be useful for round-tripping the .proto file. For consistency we + // give message fields a synthetic oneof also, even though it is not required + // to track presence. This is especially important because the parser can't + // tell if a field is a message or an enum, so it must always create a + // synthetic oneof. + // + // Proto2 optional fields do not set this flag, because they already indicate + // optional with `LABEL_OPTIONAL`. + optional bool proto3_optional = 17; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. 
+ // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. + repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default = false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default = false]; +} + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. 
+// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. +// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + // Controls the name of the wrapper Java class generated for the .proto file. 
+ // That class will always contain the .proto file's getDescriptor() method as + // well as any top-level extensions defined in the .proto file. + // If java_multiple_files is disabled, then all the other classes from the + // .proto file will be nested inside the single wrapper outer class. + optional string java_outer_classname = 8; + + // If enabled, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the wrapper class + // named by java_outer_classname. However, the wrapper class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default = false]; + + // This option does nothing. + optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + optional bool java_string_check_utf8 = 27 [default = false]; + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default = SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. 
+ // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default = false]; + optional bool java_generic_services = 17 [default = false]; + optional bool py_generic_services = 18 [default = false]; + optional bool php_generic_services = 42 [default = false]; + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default = false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default = true]; + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. + optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. 
When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. When this option is empty, the proto file name will be + // used for determining the namespace. + optional string php_metadata_namespace = 44; + + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + optional string ruby_package = 45; + + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. 
they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default = false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default = false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default = false]; + + reserved 4, 5, 6; + + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + // + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementations still need to work as + // if the field is a repeated message field. + optional bool map_entry = 7; + + reserved 8; // javalite_serializable + reserved 9; // javanano_as_lite + + // Enable the legacy handling of JSON field name conflicts. This lowercases + // and strips underscored from the fields before comparison in proto3 only. 
+ // The new behavior takes `json_name` into account and applies to proto2 as + // well. + // + // This should only be used as a temporary measure against broken builds due + // to the change in behavior for JSON field name conflicts. + // + // TODO(b/261750190) This is legacy behavior we plan to remove once downstream + // teams have had time to migrate. + optional bool deprecated_legacy_json_field_conflicts = 11 [deprecated = true]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is only implemented to support use of + // [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of + // type "bytes" in the open source release -- sorry, we'll try to include + // other types in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + // The option [ctype=CORD] may be applied to a non-repeated field of type + // "bytes". It indicates that in C++, the data should be stored in a Cord + // instead of a string. For very large strings, this may reduce memory + // fragmentation. It may also allow better performance when parsing from a + // Cord, or when parsing with aliasing enabled, as the parsed Cord may then + // alias the original buffer. + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. 
In proto3, only explicit setting it to + // false will avoid using packed encoding. + optional bool packed = 2; + + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + optional JSType jstype = 6 [default = JS_NORMAL]; + enum JSType { + // Use the default type. + JS_NORMAL = 0; + + // Use JavaScript strings. + JS_STRING = 1; + + // Use JavaScript numbers. + JS_NUMBER = 2; + } + + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. 
Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + // + // As of May 2022, lazy verifies the contents of the byte stream during + // parsing. An invalid byte stream will cause the overall parsing to fail. + optional bool lazy = 5 [default = false]; + + // unverified_lazy does no correctness checks on the byte stream. This should + // only be used where lazy with verification is prohibitive for performance + // reasons. + optional bool unverified_lazy = 15 [default = false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default = false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default = false]; + + // Indicate that the field value should not be printed out when using debug + // formats, e.g. when the field contains sensitive credentials. 
+ optional bool debug_redact = 16 [default = false]; + + // If set to RETENTION_SOURCE, the option will be omitted from the binary. + // Note: as of January 2023, support for this is in progress and does not yet + // have an effect (b/264593489). + enum OptionRetention { + RETENTION_UNKNOWN = 0; + RETENTION_RUNTIME = 1; + RETENTION_SOURCE = 2; + } + + optional OptionRetention retention = 17; + + // This indicates the types of entities that the field may apply to when used + // as an option. If it is unset, then the field may be freely used as an + // option on any kind of entity. Note: as of January 2023, support for this is + // in progress and does not yet have an effect (b/264593489). + enum OptionTargetType { + TARGET_TYPE_UNKNOWN = 0; + TARGET_TYPE_FILE = 1; + TARGET_TYPE_EXTENSION_RANGE = 2; + TARGET_TYPE_MESSAGE = 3; + TARGET_TYPE_FIELD = 4; + TARGET_TYPE_ONEOF = 5; + TARGET_TYPE_ENUM = 6; + TARGET_TYPE_ENUM_ENTRY = 7; + TARGET_TYPE_SERVICE = 8; + TARGET_TYPE_METHOD = 9; + } + + optional OptionTargetType target = 18 [deprecated = true]; + repeated OptionTargetType targets = 19; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; + + reserved 4; // removed jtype +} + +message OneofOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? 
+ // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + optional bool deprecated = 3 [default = false]; + + reserved 5; // javanano_as_lite + + // Enable the legacy handling of JSON field name conflicts. This lowercases + // and strips underscored from the fields before comparison in proto3 only. + // The new behavior takes `json_name` into account and applies to proto2 as + // well. + // TODO(b/261750190) Remove this legacy behavior once downstream teams have + // had time to migrate. + optional bool deprecated_legacy_json_field_conflicts = 6 [deprecated = true]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + optional bool deprecated = 1 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? 
+ // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + optional bool deprecated = 33 [default = false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = 34 + [default = IDEMPOTENCY_UNKNOWN]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. 
Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + // "foo.(bar.baz).moo". + message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). 
+ // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendant. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition occurs. 
+ // For example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed = true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed = true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. 
+ // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to moo. + // // + // // Another line attached to moo. + // optional double moo = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to moo or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed = true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified object. 
The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + optional int32 end = 4; + + // Represents the identified object's effect on the element in the original + // .proto file. + enum Semantic { + // There is no effect or the effect is indescribable. + NONE = 0; + // The element is set or otherwise mutated. + SET = 1; + // An alias to the element is returned. + ALIAS = 2; + } + optional Semantic semantic = 5; + } +} diff --git a/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/duration.proto b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/duration.proto new file mode 100644 index 000000000000..41f40c22247d --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/duration.proto @@ -0,0 +1,115 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/durationpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DurationProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// A Duration represents a signed, fixed-length span of time represented +// as a count of seconds and fractions of seconds at nanosecond +// resolution. It is independent of any calendar and concepts like "day" +// or "month". It is related to Timestamp in that the difference between +// two Timestamp values is a Duration and it can be added or subtracted +// from a Timestamp. Range is approximately +-10,000 years. +// +// # Examples +// +// Example 1: Compute Duration from two Timestamps in pseudo code. 
+// +// Timestamp start = ...; +// Timestamp end = ...; +// Duration duration = ...; +// +// duration.seconds = end.seconds - start.seconds; +// duration.nanos = end.nanos - start.nanos; +// +// if (duration.seconds < 0 && duration.nanos > 0) { +// duration.seconds += 1; +// duration.nanos -= 1000000000; +// } else if (duration.seconds > 0 && duration.nanos < 0) { +// duration.seconds -= 1; +// duration.nanos += 1000000000; +// } +// +// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. +// +// Timestamp start = ...; +// Duration duration = ...; +// Timestamp end = ...; +// +// end.seconds = start.seconds + duration.seconds; +// end.nanos = start.nanos + duration.nanos; +// +// if (end.nanos < 0) { +// end.seconds -= 1; +// end.nanos += 1000000000; +// } else if (end.nanos >= 1000000000) { +// end.seconds += 1; +// end.nanos -= 1000000000; +// } +// +// Example 3: Compute Duration from datetime.timedelta in Python. +// +// td = datetime.timedelta(days=3, minutes=10) +// duration = Duration() +// duration.FromTimedelta(td) +// +// # JSON Mapping +// +// In JSON format, the Duration type is encoded as a string rather than an +// object, where the string ends in the suffix "s" (indicating seconds) and +// is preceded by the number of seconds, with nanoseconds expressed as +// fractional seconds. For example, 3 seconds with 0 nanoseconds should be +// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +// microsecond should be expressed in JSON format as "3.000001s". +// +message Duration { + // Signed seconds of the span of time. Must be from -315,576,000,000 + // to +315,576,000,000 inclusive. Note: these bounds are computed from: + // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + int64 seconds = 1; + + // Signed fractions of a second at nanosecond resolution of the span + // of time. 
Durations less than one second are represented with a 0 + // `seconds` field and a positive or negative `nanos` field. For durations + // of one second or more, a non-zero value for the `nanos` field must be + // of the same sign as the `seconds` field. Must be from -999,999,999 + // to +999,999,999 inclusive. + int32 nanos = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/empty.proto b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/empty.proto new file mode 100644 index 000000000000..b87c89dcfce9 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/empty.proto @@ -0,0 +1,51 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/known/emptypb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "EmptyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; + +// A generic empty message that you can re-use to avoid defining duplicated +// empty messages in your APIs. A typical example is to use it as the request +// or the response type of an API method. For instance: +// +// service Foo { +// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); +// } +// +message Empty {} diff --git a/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/field_mask.proto b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/field_mask.proto new file mode 100644 index 000000000000..b28334b94392 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/field_mask.proto @@ -0,0 +1,245 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto3"; + +package google.protobuf; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "FieldMaskProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/fieldmaskpb"; +option cc_enable_arenas = true; + +// `FieldMask` represents a set of symbolic field paths, for example: +// +// paths: "f.a" +// paths: "f.b.d" +// +// Here `f` represents a field in some root message, `a` and `b` +// fields in the message found in `f`, and `d` a field found in the +// message in `f.b`. +// +// Field masks are used to specify a subset of fields that should be +// returned by a get operation or modified by an update operation. +// Field masks also have a custom JSON encoding (see below). +// +// # Field Masks in Projections +// +// When used in the context of a projection, a response message or +// sub-message is filtered by the API to only contain those fields as +// specified in the mask. For example, if the mask in the previous +// example is applied to a response message as follows: +// +// f { +// a : 22 +// b { +// d : 1 +// x : 2 +// } +// y : 13 +// } +// z: 8 +// +// The result will not contain specific values for fields x,y and z +// (their value will be set to the default, and omitted in proto text +// output): +// +// +// f { +// a : 22 +// b { +// d : 1 +// } +// } +// +// A repeated field is not allowed except at the last position of a +// paths string. +// +// If a FieldMask object is not present in a get operation, the +// operation applies to all fields (as if a FieldMask of all fields +// had been specified). +// +// Note that a field mask does not necessarily apply to the +// top-level response message. 
In case of a REST get operation, the +// field mask applies directly to the response, but in case of a REST +// list operation, the mask instead applies to each individual message +// in the returned resource list. In case of a REST custom method, +// other definitions may be used. Where the mask applies will be +// clearly documented together with its declaration in the API. In +// any case, the effect on the returned resource/resources is required +// behavior for APIs. +// +// # Field Masks in Update Operations +// +// A field mask in update operations specifies which fields of the +// targeted resource are going to be updated. The API is required +// to only change the values of the fields as specified in the mask +// and leave the others untouched. If a resource is passed in to +// describe the updated values, the API ignores the values of all +// fields not covered by the mask. +// +// If a repeated field is specified for an update operation, new values will +// be appended to the existing repeated field in the target resource. Note that +// a repeated field is only allowed in the last position of a `paths` string. +// +// If a sub-message is specified in the last position of the field mask for an +// update operation, then new value will be merged into the existing sub-message +// in the target resource. +// +// For example, given the target message: +// +// f { +// b { +// d: 1 +// x: 2 +// } +// c: [1] +// } +// +// And an update message: +// +// f { +// b { +// d: 10 +// } +// c: [2] +// } +// +// then if the field mask is: +// +// paths: ["f.b", "f.c"] +// +// then the result will be: +// +// f { +// b { +// d: 10 +// x: 2 +// } +// c: [1, 2] +// } +// +// An implementation may provide options to override this default behavior for +// repeated and message fields. +// +// In order to reset a field's value to the default, the field must +// be in the mask and set to the default value in the provided resource. 
+// Hence, in order to reset all fields of a resource, provide a default +// instance of the resource and set all fields in the mask, or do +// not provide a mask as described below. +// +// If a field mask is not present on update, the operation applies to +// all fields (as if a field mask of all fields has been specified). +// Note that in the presence of schema evolution, this may mean that +// fields the client does not know and has therefore not filled into +// the request will be reset to their default. If this is unwanted +// behavior, a specific service may require a client to always specify +// a field mask, producing an error if not. +// +// As with get operations, the location of the resource which +// describes the updated values in the request message depends on the +// operation kind. In any case, the effect of the field mask is +// required to be honored by the API. +// +// ## Considerations for HTTP REST +// +// The HTTP kind of an update operation which uses a field mask must +// be set to PATCH instead of PUT in order to satisfy HTTP semantics +// (PUT must only be used for full updates). +// +// # JSON Encoding of Field Masks +// +// In JSON, a field mask is encoded as a single string where paths are +// separated by a comma. Fields name in each path are converted +// to/from lower-camel naming conventions. +// +// As an example, consider the following message declarations: +// +// message Profile { +// User user = 1; +// Photo photo = 2; +// } +// message User { +// string display_name = 1; +// string address = 2; +// } +// +// In proto a field mask for `Profile` may look as such: +// +// mask { +// paths: "user.display_name" +// paths: "photo" +// } +// +// In JSON, the same mask is represented as below: +// +// { +// mask: "user.displayName,photo" +// } +// +// # Field Masks and Oneof Fields +// +// Field masks treat fields in oneofs just as regular fields. 
Consider the +// following message: +// +// message SampleMessage { +// oneof test_oneof { +// string name = 4; +// SubMessage sub_message = 9; +// } +// } +// +// The field mask can be: +// +// mask { +// paths: "name" +// } +// +// Or: +// +// mask { +// paths: "sub_message" +// } +// +// Note that oneof type names ("test_oneof" in this case) cannot be used in +// paths. +// +// ## Field Mask Verification +// +// The implementation of any API method which has a FieldMask type field in the +// request should verify the included field paths, and return an +// `INVALID_ARGUMENT` error if any path is unmappable. +message FieldMask { + // The set of field mask paths. + repeated string paths = 1; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/source_context.proto b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/source_context.proto new file mode 100644 index 000000000000..135f50fea51c --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/source_context.proto @@ -0,0 +1,48 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "SourceContextProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/sourcecontextpb"; + +// `SourceContext` represents information about the source of a +// protobuf element, like the file in which it is defined. +message SourceContext { + // The path-qualified name of the .proto file that contained the associated + // protobuf element. For example: `"google/protobuf/source_context.proto"`. 
+ string file_name = 1; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/struct.proto b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/struct.proto new file mode 100644 index 000000000000..1bf0c1ad9586 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/struct.proto @@ -0,0 +1,95 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/structpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "StructProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// `Struct` represents a structured data value, consisting of fields +// which map to dynamically typed values. In some languages, `Struct` +// might be supported by a native representation. For example, in +// scripting languages like JS a struct is represented as an +// object. The details of that representation are described together +// with the proto support for the language. +// +// The JSON representation for `Struct` is JSON object. +message Struct { + // Unordered map of dynamically typed values. + map fields = 1; +} + +// `Value` represents a dynamically typed value which can be either +// null, a number, a string, a boolean, a recursive struct value, or a +// list of values. A producer of value is expected to set one of these +// variants. Absence of any variant indicates an error. +// +// The JSON representation for `Value` is JSON value. +message Value { + // The kind of value. + oneof kind { + // Represents a null value. + NullValue null_value = 1; + // Represents a double value. + double number_value = 2; + // Represents a string value. 
+ string string_value = 3; + // Represents a boolean value. + bool bool_value = 4; + // Represents a structured value. + Struct struct_value = 5; + // Represents a repeated `Value`. + ListValue list_value = 6; + } +} + +// `NullValue` is a singleton enumeration to represent the null value for the +// `Value` type union. +// +// The JSON representation for `NullValue` is JSON `null`. +enum NullValue { + // Null value. + NULL_VALUE = 0; +} + +// `ListValue` is a wrapper around a repeated field of values. +// +// The JSON representation for `ListValue` is JSON array. +message ListValue { + // Repeated field of dynamically typed values. + repeated Value values = 1; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/timestamp.proto b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/timestamp.proto new file mode 100644 index 000000000000..fd0bc07dc3c9 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/timestamp.proto @@ -0,0 +1,144 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/timestamppb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "TimestampProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// A Timestamp represents a point in time independent of any time zone or local +// calendar, encoded as a count of seconds and fractions of seconds at +// nanosecond resolution. The count is relative to an epoch at UTC midnight on +// January 1, 1970, in the proleptic Gregorian calendar which extends the +// Gregorian calendar backwards to year one. +// +// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap +// second table is needed for interpretation, using a [24-hour linear +// smear](https://developers.google.com/time/smear). +// +// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By +// restricting to that range, we ensure that we can convert to and from [RFC +// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. 
+// +// # Examples +// +// Example 1: Compute Timestamp from POSIX `time()`. +// +// Timestamp timestamp; +// timestamp.set_seconds(time(NULL)); +// timestamp.set_nanos(0); +// +// Example 2: Compute Timestamp from POSIX `gettimeofday()`. +// +// struct timeval tv; +// gettimeofday(&tv, NULL); +// +// Timestamp timestamp; +// timestamp.set_seconds(tv.tv_sec); +// timestamp.set_nanos(tv.tv_usec * 1000); +// +// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. +// +// FILETIME ft; +// GetSystemTimeAsFileTime(&ft); +// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; +// +// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z +// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. +// Timestamp timestamp; +// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); +// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); +// +// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. +// +// long millis = System.currentTimeMillis(); +// +// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) +// .setNanos((int) ((millis % 1000) * 1000000)).build(); +// +// Example 5: Compute Timestamp from Java `Instant.now()`. +// +// Instant now = Instant.now(); +// +// Timestamp timestamp = +// Timestamp.newBuilder().setSeconds(now.getEpochSecond()) +// .setNanos(now.getNano()).build(); +// +// Example 6: Compute Timestamp from current time in Python. +// +// timestamp = Timestamp() +// timestamp.GetCurrentTime() +// +// # JSON Mapping +// +// In JSON format, the Timestamp type is encoded as a string in the +// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the +// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +// where {year} is always expressed using four digits while {month}, {day}, +// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +// seconds, which can go up to 9 digits (i.e. 
up to 1 nanosecond resolution), +// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +// is required. A proto3 JSON serializer should always use UTC (as indicated by +// "Z") when printing the Timestamp type and a proto3 JSON parser should be +// able to accept both UTC and other timezones (as indicated by an offset). +// +// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +// 01:30 UTC on January 15, 2017. +// +// In JavaScript, one can convert a Date object to this format using the +// standard +// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) +// method. In Python, a standard `datetime.datetime` object can be converted +// to this format using +// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with +// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use +// the Joda Time's [`ISODateTimeFormat.dateTime()`]( +// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() +// ) to obtain a formatter capable of generating timestamps in this format. +// +message Timestamp { + // Represents seconds of UTC time since Unix epoch + // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + // 9999-12-31T23:59:59Z inclusive. + int64 seconds = 1; + + // Non-negative fractions of a second at nanosecond resolution. Negative + // second values with fractions must still have non-negative nanos values + // that count forward in time. Must be from 0 to 999,999,999 + // inclusive. 
+ int32 nanos = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/type.proto b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/type.proto new file mode 100644 index 000000000000..48cb11e75518 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/type.proto @@ -0,0 +1,193 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +import "google/protobuf/any.proto"; +import "google/protobuf/source_context.proto"; + +option cc_enable_arenas = true; +option java_package = "com.google.protobuf"; +option java_outer_classname = "TypeProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/typepb"; + +// A protocol buffer message type. +message Type { + // The fully qualified message name. + string name = 1; + // The list of fields. + repeated Field fields = 2; + // The list of types appearing in `oneof` definitions in this type. + repeated string oneofs = 3; + // The protocol buffer options. + repeated Option options = 4; + // The source context. + SourceContext source_context = 5; + // The source syntax. + Syntax syntax = 6; + // The source edition string, only valid when syntax is SYNTAX_EDITIONS. + string edition = 7; +} + +// A single field of a message type. +message Field { + // Basic field types. + enum Kind { + // Field type unknown. + TYPE_UNKNOWN = 0; + // Field type double. + TYPE_DOUBLE = 1; + // Field type float. + TYPE_FLOAT = 2; + // Field type int64. + TYPE_INT64 = 3; + // Field type uint64. + TYPE_UINT64 = 4; + // Field type int32. + TYPE_INT32 = 5; + // Field type fixed64. + TYPE_FIXED64 = 6; + // Field type fixed32. 
+ TYPE_FIXED32 = 7; + // Field type bool. + TYPE_BOOL = 8; + // Field type string. + TYPE_STRING = 9; + // Field type group. Proto2 syntax only, and deprecated. + TYPE_GROUP = 10; + // Field type message. + TYPE_MESSAGE = 11; + // Field type bytes. + TYPE_BYTES = 12; + // Field type uint32. + TYPE_UINT32 = 13; + // Field type enum. + TYPE_ENUM = 14; + // Field type sfixed32. + TYPE_SFIXED32 = 15; + // Field type sfixed64. + TYPE_SFIXED64 = 16; + // Field type sint32. + TYPE_SINT32 = 17; + // Field type sint64. + TYPE_SINT64 = 18; + } + + // Whether a field is optional, required, or repeated. + enum Cardinality { + // For fields with unknown cardinality. + CARDINALITY_UNKNOWN = 0; + // For optional fields. + CARDINALITY_OPTIONAL = 1; + // For required fields. Proto2 syntax only. + CARDINALITY_REQUIRED = 2; + // For repeated fields. + CARDINALITY_REPEATED = 3; + } + + // The field type. + Kind kind = 1; + // The field cardinality. + Cardinality cardinality = 2; + // The field number. + int32 number = 3; + // The field name. + string name = 4; + // The field type URL, without the scheme, for message or enumeration + // types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. + string type_url = 6; + // The index of the field type in `Type.oneofs`, for message or enumeration + // types. The first type has index 1; zero means the type is not in the list. + int32 oneof_index = 7; + // Whether to use alternative packed wire representation. + bool packed = 8; + // The protocol buffer options. + repeated Option options = 9; + // The field JSON name. + string json_name = 10; + // The string value of the default value of this field. Proto2 syntax only. + string default_value = 11; +} + +// Enum type definition. +message Enum { + // Enum type name. + string name = 1; + // Enum value definitions. + repeated EnumValue enumvalue = 2; + // Protocol buffer options. + repeated Option options = 3; + // The source context. 
+ SourceContext source_context = 4; + // The source syntax. + Syntax syntax = 5; + // The source edition string, only valid when syntax is SYNTAX_EDITIONS. + string edition = 6; +} + +// Enum value definition. +message EnumValue { + // Enum value name. + string name = 1; + // Enum value number. + int32 number = 2; + // Protocol buffer options. + repeated Option options = 3; +} + +// A protocol buffer option, which can be attached to a message, field, +// enumeration, etc. +message Option { + // The option's name. For protobuf built-in options (options defined in + // descriptor.proto), this is the short name. For example, `"map_entry"`. + // For custom options, it should be the fully-qualified name. For example, + // `"google.api.http"`. + string name = 1; + // The option's value packed in an Any message. If the value is a primitive, + // the corresponding wrapper type defined in google/protobuf/wrappers.proto + // should be used. If the value is an enum, it should be stored as an int32 + // value using the google.protobuf.Int32Value type. + Any value = 2; +} + +// The syntax in which a protocol buffer element is defined. +enum Syntax { + // Syntax `proto2`. + SYNTAX_PROTO2 = 0; + // Syntax `proto3`. + SYNTAX_PROTO3 = 1; + // Syntax `editions`. + SYNTAX_EDITIONS = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/wrappers.proto b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/wrappers.proto new file mode 100644 index 000000000000..1959fa55a4e7 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/b8463852018782c2dc2b4492aee590b0/google/protobuf/wrappers.proto @@ -0,0 +1,123 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Wrappers for primitive (non-message) types. These types are useful +// for embedding primitives in the `google.protobuf.Any` type and for places +// where we need to distinguish between the absence of a primitive +// typed field and its default value. +// +// These wrappers have no meaningful use within repeated fields as they lack +// the ability to detect presence on individual elements. 
+// These wrappers have no meaningful use within a map or a oneof since +// individual entries of a map or fields of a oneof can already detect presence. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/wrapperspb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "WrappersProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// Wrapper message for `double`. +// +// The JSON representation for `DoubleValue` is JSON number. +message DoubleValue { + // The double value. + double value = 1; +} + +// Wrapper message for `float`. +// +// The JSON representation for `FloatValue` is JSON number. +message FloatValue { + // The float value. + float value = 1; +} + +// Wrapper message for `int64`. +// +// The JSON representation for `Int64Value` is JSON string. +message Int64Value { + // The int64 value. + int64 value = 1; +} + +// Wrapper message for `uint64`. +// +// The JSON representation for `UInt64Value` is JSON string. +message UInt64Value { + // The uint64 value. + uint64 value = 1; +} + +// Wrapper message for `int32`. +// +// The JSON representation for `Int32Value` is JSON number. +message Int32Value { + // The int32 value. + int32 value = 1; +} + +// Wrapper message for `uint32`. +// +// The JSON representation for `UInt32Value` is JSON number. +message UInt32Value { + // The uint32 value. + uint32 value = 1; +} + +// Wrapper message for `bool`. +// +// The JSON representation for `BoolValue` is JSON `true` and `false`. +message BoolValue { + // The bool value. + bool value = 1; +} + +// Wrapper message for `string`. +// +// The JSON representation for `StringValue` is JSON string. +message StringValue { + // The string value. + string value = 1; +} + +// Wrapper message for `bytes`. +// +// The JSON representation for `BytesValue` is JSON string. 
+message BytesValue { + // The bytes value. + bytes value = 1; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/annotations.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/annotations.proto new file mode 100644 index 000000000000..efdab3db6ca8 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/annotations.proto @@ -0,0 +1,31 @@ +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/http.proto"; +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "AnnotationsProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // See `HttpRule`. 
+ HttpRule http = 72295728; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/auth.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/auth.proto new file mode 100644 index 000000000000..ca91bb1bf069 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/auth.proto @@ -0,0 +1,237 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "AuthProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Authentication` defines the authentication configuration for API methods +// provided by an API service. +// +// Example: +// +// name: calendar.googleapis.com +// authentication: +// providers: +// - id: google_calendar_auth +// jwks_uri: https://www.googleapis.com/oauth2/v1/certs +// issuer: https://securetoken.google.com +// rules: +// - selector: "*" +// requirements: +// provider_id: google_calendar_auth +// - selector: google.calendar.Delegate +// oauth: +// canonical_scopes: https://www.googleapis.com/auth/calendar.read +message Authentication { + // A list of authentication rules that apply to individual API methods. 
+ // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated AuthenticationRule rules = 3; + + // Defines a set of authentication providers that a service supports. + repeated AuthProvider providers = 4; +} + +// Authentication rules for the service. +// +// By default, if a method has any authentication requirements, every request +// must include a valid credential matching one of the requirements. +// It's an error to include more than one kind of credential in a single +// request. +// +// If a method doesn't have any auth requirements, request credentials will be +// ignored. +message AuthenticationRule { + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + string selector = 1; + + // The requirements for OAuth credentials. + OAuthRequirements oauth = 2; + + // If true, the service accepts API keys without any other credential. + // This flag only applies to HTTP and gRPC requests. + bool allow_without_credential = 5; + + // Requirements for additional authentication providers. + repeated AuthRequirement requirements = 7; +} + +// Specifies a location to extract JWT from an API request. +message JwtLocation { + oneof in { + // Specifies HTTP header name to extract JWT token. + string header = 1; + + // Specifies URL query parameter name to extract JWT token. + string query = 2; + + // Specifies cookie name to extract JWT token. + string cookie = 4; + } + + // The value prefix. The value format is "value_prefix{token}" + // Only applies to "in" header type. Must be empty for "in" query type. + // If not empty, the header value has to match (case sensitive) this prefix. + // If not matched, JWT will not be extracted. If matched, JWT will be + // extracted after the prefix is removed. + // + // For example, for "Authorization: Bearer {JWT}", + // value_prefix="Bearer " with a space at the end. 
+ string value_prefix = 3; +} + +// Configuration for an authentication provider, including support for +// [JSON Web Token +// (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32). +message AuthProvider { + // The unique identifier of the auth provider. It will be referred to by + // `AuthRequirement.provider_id`. + // + // Example: "bookstore_auth". + string id = 1; + + // Identifies the principal that issued the JWT. See + // https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.1 + // Usually a URL or an email address. + // + // Example: https://securetoken.google.com + // Example: 1234567-compute@developer.gserviceaccount.com + string issuer = 2; + + // URL of the provider's public key set to validate signature of the JWT. See + // [OpenID + // Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata). + // Optional if the key set document: + // - can be retrieved from + // [OpenID + // Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html) + // of the issuer. + // - can be inferred from the email domain of the issuer (e.g. a Google + // service account). + // + // Example: https://www.googleapis.com/oauth2/v1/certs + string jwks_uri = 3; + + // The list of JWT + // [audiences](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.3). + // that are allowed to access. A JWT containing any of these audiences will + // be accepted. When this setting is absent, JWTs with audiences: + // - "https://[service.name]/[google.protobuf.Api.name]" + // - "https://[service.name]/" + // will be accepted. 
+ // For example, if no audiences are in the setting, LibraryService API will + // accept JWTs with the following audiences: + // - + // https://library-example.googleapis.com/google.example.library.v1.LibraryService + // - https://library-example.googleapis.com/ + // + // Example: + // + // audiences: bookstore_android.apps.googleusercontent.com, + // bookstore_web.apps.googleusercontent.com + string audiences = 4; + + // Redirect URL if JWT token is required but not present or is expired. + // Implement authorizationUrl of securityDefinitions in OpenAPI spec. + string authorization_url = 5; + + // Defines the locations to extract the JWT. For now it is only used by the + // Cloud Endpoints to store the OpenAPI extension [x-google-jwt-locations] + // (https://cloud.google.com/endpoints/docs/openapi/openapi-extensions#x-google-jwt-locations) + // + // JWT locations can be one of HTTP headers, URL query parameters or + // cookies. The rule is that the first match wins. + // + // If not specified, default to use following 3 locations: + // 1) Authorization: Bearer + // 2) x-goog-iap-jwt-assertion + // 3) access_token query parameter + // + // Default locations can be specified as followings: + // jwt_locations: + // - header: Authorization + // value_prefix: "Bearer " + // - header: x-goog-iap-jwt-assertion + // - query: access_token + repeated JwtLocation jwt_locations = 6; +} + +// OAuth scopes are a way to define data and permissions on data. For example, +// there are scopes defined for "Read-only access to Google Calendar" and +// "Access to Cloud Platform". Users can consent to a scope for an application, +// giving it permission to access that data on their behalf. +// +// OAuth scope specifications should be fairly coarse grained; a user will need +// to see and understand the text description of what your scope means. +// +// In most cases: use one or at most two OAuth scopes for an entire family of +// products. 
If your product has multiple APIs, you should probably be sharing +// the OAuth scope across all of those APIs. +// +// When you need finer grained OAuth consent screens: talk with your product +// management about how developers will use them in practice. +// +// Please note that even though each of the canonical scopes is enough for a +// request to be accepted and passed to the backend, a request can still fail +// due to the backend requiring additional scopes or permissions. +message OAuthRequirements { + // The list of publicly documented OAuth scopes that are allowed access. An + // OAuth token containing any of these scopes will be accepted. + // + // Example: + // + // canonical_scopes: https://www.googleapis.com/auth/calendar, + // https://www.googleapis.com/auth/calendar.read + string canonical_scopes = 1; +} + +// User-defined authentication requirements, including support for +// [JSON Web Token +// (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32). +message AuthRequirement { + // [id][google.api.AuthProvider.id] from authentication provider. + // + // Example: + // + // provider_id: bookstore_auth + string provider_id = 1; + + // NOTE: This will be deprecated soon, once AuthProvider.audiences is + // implemented and accepted in all the runtime components. + // + // The list of JWT + // [audiences](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.3). + // that are allowed to access. A JWT containing any of these audiences will + // be accepted. When this setting is absent, only JWTs with audience + // "https://[Service_name][google.api.Service.name]/[API_name][google.protobuf.Api.name]" + // will be accepted. For example, if no audiences are in the setting, + // LibraryService API will only accept JWTs with the following audience + // "https://library-example.googleapis.com/google.example.library.v1.LibraryService". 
+ // + // Example: + // + // audiences: bookstore_android.apps.googleusercontent.com, + // bookstore_web.apps.googleusercontent.com + string audiences = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/backend.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/backend.proto new file mode 100644 index 000000000000..6ff68878b6e3 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/backend.proto @@ -0,0 +1,185 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "BackendProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Backend` defines the backend configuration for a service. +message Backend { + // A list of API backend rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated BackendRule rules = 1; +} + +// A backend rule provides configuration for an individual API element. 
+message BackendRule { + // Path Translation specifies how to combine the backend address with the + // request path in order to produce the appropriate forwarding URL for the + // request. + // + // Path Translation is applicable only to HTTP-based backends. Backends which + // do not accept requests over HTTP/HTTPS should leave `path_translation` + // unspecified. + enum PathTranslation { + PATH_TRANSLATION_UNSPECIFIED = 0; + + // Use the backend address as-is, with no modification to the path. If the + // URL pattern contains variables, the variable names and values will be + // appended to the query string. If a query string parameter and a URL + // pattern variable have the same name, this may result in duplicate keys in + // the query string. + // + // # Examples + // + // Given the following operation config: + // + // Method path: /api/company/{cid}/user/{uid} + // Backend address: https://example.cloudfunctions.net/getUser + // + // Requests to the following request paths will call the backend at the + // translated path: + // + // Request path: /api/company/widgetworks/user/johndoe + // Translated: + // https://example.cloudfunctions.net/getUser?cid=widgetworks&uid=johndoe + // + // Request path: /api/company/widgetworks/user/johndoe?timezone=EST + // Translated: + // https://example.cloudfunctions.net/getUser?timezone=EST&cid=widgetworks&uid=johndoe + CONSTANT_ADDRESS = 1; + + // The request path will be appended to the backend address. 
+ // + // # Examples + // + // Given the following operation config: + // + // Method path: /api/company/{cid}/user/{uid} + // Backend address: https://example.appspot.com + // + // Requests to the following request paths will call the backend at the + // translated path: + // + // Request path: /api/company/widgetworks/user/johndoe + // Translated: + // https://example.appspot.com/api/company/widgetworks/user/johndoe + // + // Request path: /api/company/widgetworks/user/johndoe?timezone=EST + // Translated: + // https://example.appspot.com/api/company/widgetworks/user/johndoe?timezone=EST + APPEND_PATH_TO_ADDRESS = 2; + } + + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + string selector = 1; + + // The address of the API backend. + // + // The scheme is used to determine the backend protocol and security. + // The following schemes are accepted: + // + // SCHEME PROTOCOL SECURITY + // http:// HTTP None + // https:// HTTP TLS + // grpc:// gRPC None + // grpcs:// gRPC TLS + // + // It is recommended to explicitly include a scheme. Leaving out the scheme + // may cause constrasting behaviors across platforms. + // + // If the port is unspecified, the default is: + // - 80 for schemes without TLS + // - 443 for schemes with TLS + // + // For HTTP backends, use [protocol][google.api.BackendRule.protocol] + // to specify the protocol version. + string address = 2; + + // The number of seconds to wait for a response from a request. The default + // varies based on the request protocol and deployment environment. + double deadline = 3; + + // Deprecated, do not use. + double min_deadline = 4 [deprecated = true]; + + // The number of seconds to wait for the completion of a long running + // operation. The default is no deadline. + double operation_deadline = 5; + + PathTranslation path_translation = 6; + + // Authentication settings used by the backend. 
+ // + // These are typically used to provide service management functionality to + // a backend served on a publicly-routable URL. The `authentication` + // details should match the authentication behavior used by the backend. + // + // For example, specifying `jwt_audience` implies that the backend expects + // authentication via a JWT. + // + // When authentication is unspecified, the resulting behavior is the same + // as `disable_auth` set to `true`. + // + // Refer to https://developers.google.com/identity/protocols/OpenIDConnect for + // JWT ID token. + oneof authentication { + // The JWT audience is used when generating a JWT ID token for the backend. + // This ID token will be added in the HTTP "authorization" header, and sent + // to the backend. + string jwt_audience = 7; + + // When disable_auth is true, a JWT ID token won't be generated and the + // original "Authorization" HTTP header will be preserved. If the header is + // used to carry the original token and is expected by the backend, this + // field must be set to true to preserve the header. + bool disable_auth = 8; + } + + // The protocol used for sending a request to the backend. + // The supported values are "http/1.1" and "h2". + // + // The default value is inferred from the scheme in the + // [address][google.api.BackendRule.address] field: + // + // SCHEME PROTOCOL + // http:// http/1.1 + // https:// http/1.1 + // grpc:// h2 + // grpcs:// h2 + // + // For secure HTTP backends (https://) that support HTTP/2, set this field + // to "h2" for improved performance. + // + // Configuring this field to non-default values is only supported for secure + // HTTP backends. This field will be ignored for all other backends. + // + // See + // https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + // for more details on the supported values. + string protocol = 9; + + // The map between request protocol and the backend address. 
+ map overrides_by_request_protocol = 10; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/billing.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/billing.proto new file mode 100644 index 000000000000..8b75452fbcf9 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/billing.proto @@ -0,0 +1,77 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "BillingProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Billing related configuration of the service. 
+// +// The following example shows how to configure monitored resources and metrics +// for billing, `consumer_destinations` is the only supported destination and +// the monitored resources need at least one label key +// `cloud.googleapis.com/location` to indicate the location of the billing +// usage, using different monitored resources between monitoring and billing is +// recommended so they can be evolved independently: +// +// +// monitored_resources: +// - type: library.googleapis.com/billing_branch +// labels: +// - key: cloud.googleapis.com/location +// description: | +// Predefined label to support billing location restriction. +// - key: city +// description: | +// Custom label to define the city where the library branch is located +// in. +// - key: name +// description: Custom label to define the name of the library branch. +// metrics: +// - name: library.googleapis.com/book/borrowed_count +// metric_kind: DELTA +// value_type: INT64 +// unit: "1" +// billing: +// consumer_destinations: +// - monitored_resource: library.googleapis.com/billing_branch +// metrics: +// - library.googleapis.com/book/borrowed_count +message Billing { + // Configuration of a specific billing destination (Currently only support + // bill against consumer project). + message BillingDestination { + // The monitored resource type. The type must be defined in + // [Service.monitored_resources][google.api.Service.monitored_resources] + // section. + string monitored_resource = 1; + + // Names of the metrics to report to this billing destination. + // Each name must be defined in + // [Service.metrics][google.api.Service.metrics] section. + repeated string metrics = 2; + } + + // Billing configurations for sending metrics to the consumer project. + // There can be multiple consumer destinations per service, each one must have + // a different monitored resource type. A metric can be used in at most + // one consumer destination. 
+ repeated BillingDestination consumer_destinations = 8; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/client.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/client.proto new file mode 100644 index 000000000000..0952e8373c73 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/client.proto @@ -0,0 +1,427 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/launch_stage.proto"; +import "google/protobuf/descriptor.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "ClientProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // A definition of a client library method signature. + // + // In client libraries, each proto RPC corresponds to one or more methods + // which the end user is able to call, and calls the underlying RPC. + // Normally, this method receives a single argument (a struct or instance + // corresponding to the RPC request object). 
Defining this field will + // add one or more overloads providing flattened or simpler method signatures + // in some languages. + // + // The fields on the method signature are provided as a comma-separated + // string. + // + // For example, the proto RPC and annotation: + // + // rpc CreateSubscription(CreateSubscriptionRequest) + // returns (Subscription) { + // option (google.api.method_signature) = "name,topic"; + // } + // + // Would add the following Java overload (in addition to the method accepting + // the request object): + // + // public final Subscription createSubscription(String name, String topic) + // + // The following backwards-compatibility guidelines apply: + // + // * Adding this annotation to an unannotated method is backwards + // compatible. + // * Adding this annotation to a method which already has existing + // method signature annotations is backwards compatible if and only if + // the new method signature annotation is last in the sequence. + // * Modifying or removing an existing method signature annotation is + // a breaking change. + // * Re-ordering existing method signature annotations is a breaking + // change. + repeated string method_signature = 1051; +} + +extend google.protobuf.ServiceOptions { + // The hostname for this service. + // This should be specified with no prefix or protocol. + // + // Example: + // + // service Foo { + // option (google.api.default_host) = "foo.googleapi.com"; + // ... + // } + string default_host = 1049; + + // OAuth scopes needed for the client. + // + // Example: + // + // service Foo { + // option (google.api.oauth_scopes) = \ + // "https://www.googleapis.com/auth/cloud-platform"; + // ... + // } + // + // If there is more than one scope, use a comma-separated string: + // + // Example: + // + // service Foo { + // option (google.api.oauth_scopes) = \ + // "https://www.googleapis.com/auth/cloud-platform," + // "https://www.googleapis.com/auth/monitoring"; + // ... 
+ // } + string oauth_scopes = 1050; + + // The API version of this service, which should be sent by version-aware + // clients to the service. This allows services to abide by the schema and + // behavior of the service at the time this API version was deployed. + // The format of the API version must be treated as opaque by clients. + // Services may use a format with an apparent structure, but clients must + // not rely on this to determine components within an API version, or attempt + // to construct other valid API versions. Note that this is for upcoming + // functionality and may not be implemented for all services. + // + // Example: + // + // service Foo { + // option (google.api.api_version) = "v1_20230821_preview"; + // } + string api_version = 525000001; +} + +// Required information for every language. +message CommonLanguageSettings { + // Link to automatically generated reference documentation. Example: + // https://cloud.google.com/nodejs/docs/reference/asset/latest + string reference_docs_uri = 1 [deprecated = true]; + + // The destination where API teams want this client library to be published. + repeated ClientLibraryDestination destinations = 2; +} + +// Details about how and where to publish client libraries. +message ClientLibrarySettings { + // Version of the API to apply these settings to. This is the full protobuf + // package for the API, ending in the version element. + // Examples: "google.cloud.speech.v1" and "google.spanner.admin.database.v1". + string version = 1; + + // Launch stage of this version of the API. + LaunchStage launch_stage = 2; + + // When using transport=rest, the client request will encode enums as + // numbers rather than strings. + bool rest_numeric_enums = 3; + + // Settings for legacy Java features, supported in the Service YAML. + JavaSettings java_settings = 21; + + // Settings for C++ client libraries. + CppSettings cpp_settings = 22; + + // Settings for PHP client libraries. 
+ PhpSettings php_settings = 23; + + // Settings for Python client libraries. + PythonSettings python_settings = 24; + + // Settings for Node client libraries. + NodeSettings node_settings = 25; + + // Settings for .NET client libraries. + DotnetSettings dotnet_settings = 26; + + // Settings for Ruby client libraries. + RubySettings ruby_settings = 27; + + // Settings for Go client libraries. + GoSettings go_settings = 28; +} + +// This message configures the settings for publishing [Google Cloud Client +// libraries](https://cloud.google.com/apis/docs/cloud-client-libraries) +// generated from the service config. +message Publishing { + // A list of API method settings, e.g. the behavior for methods that use the + // long-running operation pattern. + repeated MethodSettings method_settings = 2; + + // Link to a *public* URI where users can report issues. Example: + // https://issuetracker.google.com/issues/new?component=190865&template=1161103 + string new_issue_uri = 101; + + // Link to product home page. Example: + // https://cloud.google.com/asset-inventory/docs/overview + string documentation_uri = 102; + + // Used as a tracking tag when collecting data about the APIs developer + // relations artifacts like docs, packages delivered to package managers, + // etc. Example: "speech". + string api_short_name = 103; + + // GitHub label to apply to issues and pull requests opened for this API. + string github_label = 104; + + // GitHub teams to be added to CODEOWNERS in the directory in GitHub + // containing source code for the client libraries for this API. + repeated string codeowner_github_teams = 105; + + // A prefix used in sample code when demarking regions to be included in + // documentation. + string doc_tag_prefix = 106; + + // For whom the client library is being published. + ClientLibraryOrganization organization = 107; + + // Client library settings. If the same version string appears multiple + // times in this list, then the last one wins. 
Settings from earlier + // settings with the same version string are discarded. + repeated ClientLibrarySettings library_settings = 109; + + // Optional link to proto reference documentation. Example: + // https://cloud.google.com/pubsub/lite/docs/reference/rpc + string proto_reference_documentation_uri = 110; + + // Optional link to REST reference documentation. Example: + // https://cloud.google.com/pubsub/lite/docs/reference/rest + string rest_reference_documentation_uri = 111; +} + +// Settings for Java client libraries. +message JavaSettings { + // The package name to use in Java. Clobbers the java_package option + // set in the protobuf. This should be used **only** by APIs + // who have already set the language_settings.java.package_name" field + // in gapic.yaml. API teams should use the protobuf java_package option + // where possible. + // + // Example of a YAML configuration:: + // + // publishing: + // java_settings: + // library_package: com.google.cloud.pubsub.v1 + string library_package = 1; + + // Configure the Java class name to use instead of the service's for its + // corresponding generated GAPIC client. Keys are fully-qualified + // service names as they appear in the protobuf (including the full + // the language_settings.java.interface_names" field in gapic.yaml. API + // teams should otherwise use the service name as it appears in the + // protobuf. + // + // Example of a YAML configuration:: + // + // publishing: + // java_settings: + // service_class_names: + // - google.pubsub.v1.Publisher: TopicAdmin + // - google.pubsub.v1.Subscriber: SubscriptionAdmin + map service_class_names = 2; + + // Some settings. + CommonLanguageSettings common = 3; +} + +// Settings for C++ client libraries. +message CppSettings { + // Some settings. + CommonLanguageSettings common = 1; +} + +// Settings for Php client libraries. +message PhpSettings { + // Some settings. + CommonLanguageSettings common = 1; +} + +// Settings for Python client libraries. 
+message PythonSettings { + // Some settings. + CommonLanguageSettings common = 1; +} + +// Settings for Node client libraries. +message NodeSettings { + // Some settings. + CommonLanguageSettings common = 1; +} + +// Settings for Dotnet client libraries. +message DotnetSettings { + // Some settings. + CommonLanguageSettings common = 1; + + // Map from original service names to renamed versions. + // This is used when the default generated types + // would cause a naming conflict. (Neither name is + // fully-qualified.) + // Example: Subscriber to SubscriberServiceApi. + map renamed_services = 2; + + // Map from full resource types to the effective short name + // for the resource. This is used when otherwise resource + // named from different services would cause naming collisions. + // Example entry: + // "datalabeling.googleapis.com/Dataset": "DataLabelingDataset" + map renamed_resources = 3; + + // List of full resource types to ignore during generation. + // This is typically used for API-specific Location resources, + // which should be handled by the generator as if they were actually + // the common Location resources. + // Example entry: "documentai.googleapis.com/Location" + repeated string ignored_resources = 4; + + // Namespaces which must be aliased in snippets due to + // a known (but non-generator-predictable) naming collision + repeated string forced_namespace_aliases = 5; + + // Method signatures (in the form "service.method(signature)") + // which are provided separately, so shouldn't be generated. + // Snippets *calling* these methods are still generated, however. + repeated string handwritten_signatures = 6; +} + +// Settings for Ruby client libraries. +message RubySettings { + // Some settings. + CommonLanguageSettings common = 1; +} + +// Settings for Go client libraries. +message GoSettings { + // Some settings. + CommonLanguageSettings common = 1; +} + +// Describes the generator configuration for a method. 
+message MethodSettings { + // Describes settings to use when generating API methods that use the + // long-running operation pattern. + // All default values below are from those used in the client library + // generators (e.g. + // [Java](https://github.com/googleapis/gapic-generator-java/blob/04c2faa191a9b5a10b92392fe8482279c4404803/src/main/java/com/google/api/generator/gapic/composer/common/RetrySettingsComposer.java)). + message LongRunning { + // Initial delay after which the first poll request will be made. + // Default value: 5 seconds. + google.protobuf.Duration initial_poll_delay = 1; + + // Multiplier to gradually increase delay between subsequent polls until it + // reaches max_poll_delay. + // Default value: 1.5. + float poll_delay_multiplier = 2; + + // Maximum time between two subsequent poll requests. + // Default value: 45 seconds. + google.protobuf.Duration max_poll_delay = 3; + + // Total polling timeout. + // Default value: 5 minutes. + google.protobuf.Duration total_poll_timeout = 4; + } + + // The fully qualified name of the method, for which the options below apply. + // This is used to find the method to apply the options. + string selector = 1; + + // Describes settings to use for long-running operations when generating + // API methods for RPCs. Complements RPCs that use the annotations in + // google/longrunning/operations.proto. + // + // Example of a YAML configuration:: + // + // publishing: + // method_settings: + // - selector: google.cloud.speech.v2.Speech.BatchRecognize + // long_running: + // initial_poll_delay: + // seconds: 60 # 1 minute + // poll_delay_multiplier: 1.5 + // max_poll_delay: + // seconds: 360 # 6 minutes + // total_poll_timeout: + // seconds: 54000 # 90 minutes + LongRunning long_running = 2; + + // List of top-level fields of the request message, that should be + // automatically populated by the client libraries based on their + // (google.api.field_info).format. Currently supported format: UUID4. 
+ // + // Example of a YAML configuration: + // + // publishing: + // method_settings: + // - selector: google.example.v1.ExampleService.CreateExample + // auto_populated_fields: + // - request_id + repeated string auto_populated_fields = 3; +} + +// The organization for which the client libraries are being published. +// Affects the url where generated docs are published, etc. +enum ClientLibraryOrganization { + // Not useful. + CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED = 0; + + // Google Cloud Platform Org. + CLOUD = 1; + + // Ads (Advertising) Org. + ADS = 2; + + // Photos Org. + PHOTOS = 3; + + // Street View Org. + STREET_VIEW = 4; + + // Shopping Org. + SHOPPING = 5; + + // Geo Org. + GEO = 6; + + // Generative AI - https://developers.generativeai.google + GENERATIVE_AI = 7; +} + +// To where should client libraries be published? +enum ClientLibraryDestination { + // Client libraries will neither be generated nor published to package + // managers. + CLIENT_LIBRARY_DESTINATION_UNSPECIFIED = 0; + + // Generate the client library in a repo under github.com/googleapis, + // but don't publish it to package managers. + GITHUB = 10; + + // Publish the library to package managers like nuget.org and npmjs.com. + PACKAGE_MANAGER = 20; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/config_change.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/config_change.proto new file mode 100644 index 000000000000..1dc8044b9053 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/config_change.proto @@ -0,0 +1,84 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/configchange;configchange"; +option java_multiple_files = true; +option java_outer_classname = "ConfigChangeProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Output generated from semantically comparing two versions of a service +// configuration. +// +// Includes detailed information about a field that have changed with +// applicable advice about potential consequences for the change, such as +// backwards-incompatibility. +message ConfigChange { + // Object hierarchy path to the change, with levels separated by a '.' + // character. For repeated fields, an applicable unique identifier field is + // used for the index (usually selector, name, or id). For maps, the term + // 'key' is used. If the field has no unique identifier, the numeric index + // is used. + // Examples: + // - visibility.rules[selector=="google.LibraryService.ListBooks"].restriction + // - quota.metric_rules[selector=="google"].metric_costs[key=="reads"].value + // - logging.producer_destinations[0] + string element = 1; + + // Value of the changed object in the old Service configuration, + // in JSON format. This field will not be populated if ChangeType == ADDED. + string old_value = 2; + + // Value of the changed object in the new Service configuration, + // in JSON format. This field will not be populated if ChangeType == REMOVED. + string new_value = 3; + + // The type for this change, either ADDED, REMOVED, or MODIFIED. 
+ ChangeType change_type = 4; + + // Collection of advice provided for this change, useful for determining the + // possible impact of this change. + repeated Advice advices = 5; +} + +// Generated advice about this change, used for providing more +// information about how a change will affect the existing service. +message Advice { + // Useful description for why this advice was applied and what actions should + // be taken to mitigate any implied risks. + string description = 2; +} + +// Classifies set of possible modifications to an object in the service +// configuration. +enum ChangeType { + // No value was provided. + CHANGE_TYPE_UNSPECIFIED = 0; + + // The changed object exists in the 'new' service configuration, but not + // in the 'old' service configuration. + ADDED = 1; + + // The changed object exists in the 'old' service configuration, but not + // in the 'new' service configuration. + REMOVED = 2; + + // The changed object exists in both service configurations, but its value + // is different. + MODIFIED = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/consumer.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/consumer.proto new file mode 100644 index 000000000000..b7e5df1c8df5 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/consumer.proto @@ -0,0 +1,82 @@ +// Copyright 2016 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "ConsumerProto"; +option java_package = "com.google.api"; + +// A descriptor for defining project properties for a service. One service may +// have many consumer projects, and the service may want to behave differently +// depending on some properties on the project. For example, a project may be +// associated with a school, or a business, or a government agency, a business +// type property on the project may affect how a service responds to the client. +// This descriptor defines which properties are allowed to be set on a project. +// +// Example: +// +// project_properties: +// properties: +// - name: NO_WATERMARK +// type: BOOL +// description: Allows usage of the API without watermarks. +// - name: EXTENDED_TILE_CACHE_PERIOD +// type: INT64 +message ProjectProperties { + // List of per consumer project-specific properties. + repeated Property properties = 1; +} + +// Defines project properties. +// +// API services can define properties that can be assigned to consumer projects +// so that backends can perform response customization without having to make +// additional calls or maintain additional storage. For example, Maps API +// defines properties that controls map tile cache period, or whether to embed a +// watermark in a result. +// +// These values can be set via API producer console. Only API providers can +// define and set these properties. +message Property { + // Supported data type of the property values + enum PropertyType { + // The type is unspecified, and will result in an error. + UNSPECIFIED = 0; + + // The type is `int64`. + INT64 = 1; + + // The type is `bool`. + BOOL = 2; + + // The type is `string`. 
+ STRING = 3; + + // The type is 'double'. + DOUBLE = 4; + } + + // The name of the property (a.k.a key). + string name = 1; + + // The type of this property. + PropertyType type = 2; + + // The description of the property + string description = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/context.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/context.proto new file mode 100644 index 000000000000..1b1651780e7f --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/context.proto @@ -0,0 +1,90 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "ContextProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Context` defines which contexts an API requests. +// +// Example: +// +// context: +// rules: +// - selector: "*" +// requested: +// - google.rpc.context.ProjectContext +// - google.rpc.context.OriginContext +// +// The above specifies that all methods in the API request +// `google.rpc.context.ProjectContext` and +// `google.rpc.context.OriginContext`. 
+// +// Available context types are defined in package +// `google.rpc.context`. +// +// This also provides mechanism to allowlist any protobuf message extension that +// can be sent in grpc metadata using “x-goog-ext--bin†and +// “x-goog-ext--jspb†format. For example, list any service +// specific protobuf types that can appear in grpc metadata as follows in your +// yaml file: +// +// Example: +// +// context: +// rules: +// - selector: "google.example.library.v1.LibraryService.CreateBook" +// allowed_request_extensions: +// - google.foo.v1.NewExtension +// allowed_response_extensions: +// - google.foo.v1.NewExtension +// +// You can also specify extension ID instead of fully qualified extension name +// here. +message Context { + // A list of RPC context rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated ContextRule rules = 1; +} + +// A context rule provides information about the context for an individual API +// element. +message ContextRule { + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + string selector = 1; + + // A list of full type names of requested contexts. + repeated string requested = 2; + + // A list of full type names of provided contexts. + repeated string provided = 3; + + // A list of full type names or extension IDs of extensions allowed in grpc + // side channel from client to backend. + repeated string allowed_request_extensions = 4; + + // A list of full type names or extension IDs of extensions allowed in grpc + // side channel from backend to client. 
+ repeated string allowed_response_extensions = 5; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/control.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/control.proto new file mode 100644 index 000000000000..cbbce6f61f9b --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/control.proto @@ -0,0 +1,41 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/policy.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "ControlProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Selects and configures the service controller used by the service. +// +// Example: +// +// control: +// environment: servicecontrol.googleapis.com +message Control { + // The service controller environment to use. If empty, no control plane + // feature (like quota and billing) will be enabled. The recommended value for + // most services is servicecontrol.googleapis.com + string environment = 1; + + // Defines policies applying to the API methods of the service. 
+ repeated MethodPolicy method_policies = 4; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/distribution.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/distribution.proto new file mode 100644 index 000000000000..b0bc4930f79e --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/distribution.proto @@ -0,0 +1,213 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/distribution;distribution"; +option java_multiple_files = true; +option java_outer_classname = "DistributionProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Distribution` contains summary statistics for a population of values. It +// optionally contains a histogram representing the distribution of those values +// across a set of buckets. +// +// The summary statistics are the count, mean, sum of the squared deviation from +// the mean, the minimum, and the maximum of the set of population of values. +// The histogram is based on a sequence of buckets and gives a count of values +// that fall into each bucket. 
The boundaries of the buckets are given either +// explicitly or by formulas for buckets of fixed or exponentially increasing +// widths. +// +// Although it is not forbidden, it is generally a bad idea to include +// non-finite values (infinities or NaNs) in the population of values, as this +// will render the `mean` and `sum_of_squared_deviation` fields meaningless. +message Distribution { + // The range of the population values. + message Range { + // The minimum of the population values. + double min = 1; + + // The maximum of the population values. + double max = 2; + } + + // `BucketOptions` describes the bucket boundaries used to create a histogram + // for the distribution. The buckets can be in a linear sequence, an + // exponential sequence, or each bucket can be specified explicitly. + // `BucketOptions` does not include the number of values in each bucket. + // + // A bucket has an inclusive lower bound and exclusive upper bound for the + // values that are counted for that bucket. The upper bound of a bucket must + // be strictly greater than the lower bound. The sequence of N buckets for a + // distribution consists of an underflow bucket (number 0), zero or more + // finite buckets (number 1 through N - 2) and an overflow bucket (number N - + // 1). The buckets are contiguous: the lower bound of bucket i (i > 0) is the + // same as the upper bound of bucket i - 1. The buckets span the whole range + // of finite values: lower bound of the underflow bucket is -infinity and the + // upper bound of the overflow bucket is +infinity. The finite buckets are + // so-called because both bounds are finite. + message BucketOptions { + // Specifies a linear sequence of buckets that all have the same width + // (except overflow and underflow). Each bucket represents a constant + // absolute uncertainty on the specific value in the bucket. + // + // There are `num_finite_buckets + 2` (= N) buckets. 
Bucket `i` has the + // following boundaries: + // + // Upper bound (0 <= i < N-1): offset + (width * i). + // + // Lower bound (1 <= i < N): offset + (width * (i - 1)). + message Linear { + // Must be greater than 0. + int32 num_finite_buckets = 1; + + // Must be greater than 0. + double width = 2; + + // Lower bound of the first bucket. + double offset = 3; + } + + // Specifies an exponential sequence of buckets that have a width that is + // proportional to the value of the lower bound. Each bucket represents a + // constant relative uncertainty on a specific value in the bucket. + // + // There are `num_finite_buckets + 2` (= N) buckets. Bucket `i` has the + // following boundaries: + // + // Upper bound (0 <= i < N-1): scale * (growth_factor ^ i). + // + // Lower bound (1 <= i < N): scale * (growth_factor ^ (i - 1)). + message Exponential { + // Must be greater than 0. + int32 num_finite_buckets = 1; + + // Must be greater than 1. + double growth_factor = 2; + + // Must be greater than 0. + double scale = 3; + } + + // Specifies a set of buckets with arbitrary widths. + // + // There are `size(bounds) + 1` (= N) buckets. Bucket `i` has the following + // boundaries: + // + // Upper bound (0 <= i < N-1): bounds[i] + // Lower bound (1 <= i < N); bounds[i - 1] + // + // The `bounds` field must contain at least one element. If `bounds` has + // only one element, then there are no finite buckets, and that single + // element is the common boundary of the overflow and underflow buckets. + message Explicit { + // The values must be monotonically increasing. + repeated double bounds = 1; + } + + // Exactly one of these three fields must be set. + oneof options { + // The linear bucket. + Linear linear_buckets = 1; + + // The exponential buckets. + Exponential exponential_buckets = 2; + + // The explicit buckets. + Explicit explicit_buckets = 3; + } + } + + // Exemplars are example points that may be used to annotate aggregated + // distribution values. 
They are metadata that gives information about a + // particular value added to a Distribution bucket, such as a trace ID that + // was active when a value was added. They may contain further information, + // such as a example values and timestamps, origin, etc. + message Exemplar { + // Value of the exemplar point. This value determines to which bucket the + // exemplar belongs. + double value = 1; + + // The observation (sampling) time of the above value. + google.protobuf.Timestamp timestamp = 2; + + // Contextual information about the example value. Examples are: + // + // Trace: type.googleapis.com/google.monitoring.v3.SpanContext + // + // Literal string: type.googleapis.com/google.protobuf.StringValue + // + // Labels dropped during aggregation: + // type.googleapis.com/google.monitoring.v3.DroppedLabels + // + // There may be only a single attachment of any given message type in a + // single exemplar, and this is enforced by the system. + repeated google.protobuf.Any attachments = 3; + } + + // The number of values in the population. Must be non-negative. This value + // must equal the sum of the values in `bucket_counts` if a histogram is + // provided. + int64 count = 1; + + // The arithmetic mean of the values in the population. If `count` is zero + // then this field must be zero. + double mean = 2; + + // The sum of squared deviations from the mean of the values in the + // population. For values x_i this is: + // + // Sum[i=1..n]((x_i - mean)^2) + // + // Knuth, "The Art of Computer Programming", Vol. 2, page 232, 3rd edition + // describes Welford's method for accumulating this sum in one pass. + // + // If `count` is zero then this field must be zero. + double sum_of_squared_deviation = 3; + + // If specified, contains the range of the population values. The field + // must not be present if the `count` is zero. + Range range = 4; + + // Defines the histogram bucket boundaries. 
If the distribution does not + // contain a histogram, then omit this field. + BucketOptions bucket_options = 6; + + // The number of values in each bucket of the histogram, as described in + // `bucket_options`. If the distribution does not have a histogram, then omit + // this field. If there is a histogram, then the sum of the values in + // `bucket_counts` must equal the value in the `count` field of the + // distribution. + // + // If present, `bucket_counts` should contain N values, where N is the number + // of buckets specified in `bucket_options`. If you supply fewer than N + // values, the remaining values are assumed to be 0. + // + // The order of the values in `bucket_counts` follows the bucket numbering + // schemes described for the three bucket types. The first value must be the + // count for the underflow bucket (number 0). The next N-2 values are the + // counts for the finite buckets (number 1 through N-2). The N'th value in + // `bucket_counts` is the count for the overflow bucket (number N-1). + repeated int64 bucket_counts = 7; + + // Must be in increasing order of `value` field. + repeated Exemplar exemplars = 10; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/documentation.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/documentation.proto new file mode 100644 index 000000000000..12936c701b42 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/documentation.proto @@ -0,0 +1,168 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "DocumentationProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Documentation` provides the information for describing a service. +// +// Example: +//
documentation:
+//   summary: >
+//     The Google Calendar API gives access
+//     to most calendar features.
+//   pages:
+//   - name: Overview
+//     content: (== include google/foo/overview.md ==)
+//   - name: Tutorial
+//     content: (== include google/foo/tutorial.md ==)
+//     subpages:
+//     - name: Java
+//       content: (== include google/foo/tutorial_java.md ==)
+//   rules:
+//   - selector: google.calendar.Calendar.Get
+//     description: >
+//       ...
+//   - selector: google.calendar.Calendar.Put
+//     description: >
+//       ...
+// 
+// Documentation is provided in markdown syntax. In addition to +// standard markdown features, definition lists, tables and fenced +// code blocks are supported. Section headers can be provided and are +// interpreted relative to the section nesting of the context where +// a documentation fragment is embedded. +// +// Documentation from the IDL is merged with documentation defined +// via the config at normalization time, where documentation provided +// by config rules overrides IDL provided. +// +// A number of constructs specific to the API platform are supported +// in documentation text. +// +// In order to reference a proto element, the following +// notation can be used: +//
[fully.qualified.proto.name][]
+// To override the display text used for the link, this can be used: +//
[display text][fully.qualified.proto.name]
+// Text can be excluded from doc using the following notation: +//
(-- internal comment --)
+// +// A few directives are available in documentation. Note that +// directives must appear on a single line to be properly +// identified. The `include` directive includes a markdown file from +// an external source: +//
(== include path/to/file ==)
+// The `resource_for` directive marks a message to be the resource of +// a collection in REST view. If it is not specified, tools attempt +// to infer the resource from the operations in a collection: +//
(== resource_for v1.shelves.books ==)
+// The directive `suppress_warning` does not directly affect documentation +// and is documented together with service config validation. +message Documentation { + // A short description of what the service does. The summary must be plain + // text. It becomes the overview of the service displayed in Google Cloud + // Console. + // NOTE: This field is equivalent to the standard field `description`. + string summary = 1; + + // The top level pages for the documentation set. + repeated Page pages = 5; + + // A list of documentation rules that apply to individual API elements. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated DocumentationRule rules = 3; + + // The URL to the root of documentation. + string documentation_root_url = 4; + + // Specifies the service root url if the default one (the service name + // from the yaml file) is not suitable. This can be seen in any fully + // specified service urls as well as sections that show a base that other + // urls are relative to. + string service_root_url = 6; + + // Declares a single overview page. For example: + //
documentation:
+  //   summary: ...
+  //   overview: (== include overview.md ==)
+  // 
+ // This is a shortcut for the following declaration (using pages style): + //
documentation:
+  //   summary: ...
+  //   pages:
+  //   - name: Overview
+  //     content: (== include overview.md ==)
+  // 
+ // Note: you cannot specify both `overview` field and `pages` field. + string overview = 2; +} + +// A documentation rule provides information about individual API elements. +message DocumentationRule { + // The selector is a comma-separated list of patterns for any element such as + // a method, a field, an enum value. Each pattern is a qualified name of the + // element which may end in "*", indicating a wildcard. Wildcards are only + // allowed at the end and for a whole component of the qualified name, + // i.e. "foo.*" is ok, but not "foo.b*" or "foo.*.bar". A wildcard will match + // one or more components. To specify a default for all applicable elements, + // the whole pattern "*" is used. + string selector = 1; + + // Description of the selected proto element (e.g. a message, a method, a + // 'service' definition, or a field). Defaults to leading & trailing comments + // taken from the proto source definition of the proto element. + string description = 2; + + // Deprecation description of the selected element(s). It can be provided if + // an element is marked as `deprecated`. + string deprecation_description = 3; +} + +// Represents a documentation page. A page can contain subpages to represent +// nested documentation set structure. +message Page { + // The name of the page. It will be used as an identity of the page to + // generate URI of the page, text of the link to this page in navigation, + // etc. The full page name (start from the root page name to this page + // concatenated with `.`) can be used as reference to the page in your + // documentation. For example: + //
pages:
+  // - name: Tutorial
+  //   content: (== include tutorial.md ==)
+  //   subpages:
+  //   - name: Java
+  //     content: (== include tutorial_java.md ==)
+  // 
+ // You can reference `Java` page using Markdown reference link syntax: + // `[Java][Tutorial.Java]`. + string name = 1; + + // The Markdown content of the page. You can use (== include {path} + // ==) to include content from a Markdown file. The content can be + // used to produce the documentation page such as HTML format page. + string content = 2; + + // Subpages of this page. The order of subpages specified here will be + // honored in the generated docset. + repeated Page subpages = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/endpoint.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/endpoint.proto new file mode 100644 index 000000000000..7f6dca7c16c6 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/endpoint.proto @@ -0,0 +1,73 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "EndpointProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Endpoint` describes a network address of a service that serves a set of +// APIs. It is commonly known as a service endpoint. 
A service may expose +// any number of service endpoints, and all service endpoints share the same +// service definition, such as quota limits and monitoring metrics. +// +// Example: +// +// type: google.api.Service +// name: library-example.googleapis.com +// endpoints: +// # Declares network address `https://library-example.googleapis.com` +// # for service `library-example.googleapis.com`. The `https` scheme +// # is implicit for all service endpoints. Other schemes may be +// # supported in the future. +// - name: library-example.googleapis.com +// allow_cors: false +// - name: content-staging-library-example.googleapis.com +// # Allows HTTP OPTIONS calls to be passed to the API frontend, for it +// # to decide whether the subsequent cross-origin request is allowed +// # to proceed. +// allow_cors: true +message Endpoint { + // The canonical name of this endpoint. + string name = 1; + + // Unimplemented. Dot not use. + // + // DEPRECATED: This field is no longer supported. Instead of using aliases, + // please specify multiple [google.api.Endpoint][google.api.Endpoint] for each + // of the intended aliases. + // + // Additional names that this endpoint will be hosted on. + repeated string aliases = 2 [deprecated = true]; + + // The specification of an Internet routable address of API frontend that will + // handle requests to this [API + // Endpoint](https://cloud.google.com/apis/design/glossary). It should be + // either a valid IPv4 address or a fully-qualified domain name. For example, + // "8.8.8.8" or "myservice.appspot.com". + string target = 101; + + // Allowing + // [CORS](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing), aka + // cross-domain traffic, would allow the backends served from this endpoint to + // receive and respond to HTTP OPTIONS requests. The response will be used by + // the browser to determine whether the subsequent cross-origin request is + // allowed to proceed. 
+ bool allow_cors = 5; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/error_reason.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/error_reason.proto new file mode 100644 index 000000000000..cf806698dacb --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/error_reason.proto @@ -0,0 +1,589 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/error_reason;error_reason"; +option java_multiple_files = true; +option java_outer_classname = "ErrorReasonProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines the supported values for `google.rpc.ErrorInfo.reason` for the +// `googleapis.com` error domain. This error domain is reserved for [Service +// Infrastructure](https://cloud.google.com/service-infrastructure/docs/overview). +// For each error info of this domain, the metadata key "service" refers to the +// logical identifier of an API service, such as "pubsub.googleapis.com". The +// "consumer" refers to the entity that consumes an API Service. It typically is +// a Google project that owns the client application or the server resource, +// such as "projects/123". 
Other metadata keys are specific to each error +// reason. For more information, see the definition of the specific error +// reason. +enum ErrorReason { + // Do not use this default value. + ERROR_REASON_UNSPECIFIED = 0; + + // The request is calling a disabled service for a consumer. + // + // Example of an ErrorInfo when the consumer "projects/123" contacting + // "pubsub.googleapis.com" service which is disabled: + // + // { "reason": "SERVICE_DISABLED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "pubsub.googleapis.com" + // } + // } + // + // This response indicates the "pubsub.googleapis.com" has been disabled in + // "projects/123". + SERVICE_DISABLED = 1; + + // The request whose associated billing account is disabled. + // + // Example of an ErrorInfo when the consumer "projects/123" fails to contact + // "pubsub.googleapis.com" service because the associated billing account is + // disabled: + // + // { "reason": "BILLING_DISABLED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "pubsub.googleapis.com" + // } + // } + // + // This response indicates the billing account associated has been disabled. + BILLING_DISABLED = 2; + + // The request is denied because the provided [API + // key](https://cloud.google.com/docs/authentication/api-keys) is invalid. It + // may be in a bad format, cannot be found, or has been expired). + // + // Example of an ErrorInfo when the request is contacting + // "storage.googleapis.com" service with an invalid API key: + // + // { "reason": "API_KEY_INVALID", + // "domain": "googleapis.com", + // "metadata": { + // "service": "storage.googleapis.com", + // } + // } + API_KEY_INVALID = 3; + + // The request is denied because it violates [API key API + // restrictions](https://cloud.google.com/docs/authentication/api-keys#adding_api_restrictions). 
+ // + // Example of an ErrorInfo when the consumer "projects/123" fails to call the + // "storage.googleapis.com" service because this service is restricted in the + // API key: + // + // { "reason": "API_KEY_SERVICE_BLOCKED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "storage.googleapis.com" + // } + // } + API_KEY_SERVICE_BLOCKED = 4; + + // The request is denied because it violates [API key HTTP + // restrictions](https://cloud.google.com/docs/authentication/api-keys#adding_http_restrictions). + // + // Example of an ErrorInfo when the consumer "projects/123" fails to call + // "storage.googleapis.com" service because the http referrer of the request + // violates API key HTTP restrictions: + // + // { "reason": "API_KEY_HTTP_REFERRER_BLOCKED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "storage.googleapis.com", + // } + // } + API_KEY_HTTP_REFERRER_BLOCKED = 7; + + // The request is denied because it violates [API key IP address + // restrictions](https://cloud.google.com/docs/authentication/api-keys#adding_application_restrictions). + // + // Example of an ErrorInfo when the consumer "projects/123" fails to call + // "storage.googleapis.com" service because the caller IP of the request + // violates API key IP address restrictions: + // + // { "reason": "API_KEY_IP_ADDRESS_BLOCKED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "storage.googleapis.com", + // } + // } + API_KEY_IP_ADDRESS_BLOCKED = 8; + + // The request is denied because it violates [API key Android application + // restrictions](https://cloud.google.com/docs/authentication/api-keys#adding_application_restrictions). 
+ // + // Example of an ErrorInfo when the consumer "projects/123" fails to call + // "storage.googleapis.com" service because the request from the Android apps + // violates the API key Android application restrictions: + // + // { "reason": "API_KEY_ANDROID_APP_BLOCKED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "storage.googleapis.com" + // } + // } + API_KEY_ANDROID_APP_BLOCKED = 9; + + // The request is denied because it violates [API key iOS application + // restrictions](https://cloud.google.com/docs/authentication/api-keys#adding_application_restrictions). + // + // Example of an ErrorInfo when the consumer "projects/123" fails to call + // "storage.googleapis.com" service because the request from the iOS apps + // violates the API key iOS application restrictions: + // + // { "reason": "API_KEY_IOS_APP_BLOCKED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "storage.googleapis.com" + // } + // } + API_KEY_IOS_APP_BLOCKED = 13; + + // The request is denied because there is not enough rate quota for the + // consumer. 
+ // + // Example of an ErrorInfo when the consumer "projects/123" fails to contact + // "pubsub.googleapis.com" service because consumer's rate quota usage has + // reached the maximum value set for the quota limit + // "ReadsPerMinutePerProject" on the quota metric + // "pubsub.googleapis.com/read_requests": + // + // { "reason": "RATE_LIMIT_EXCEEDED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "pubsub.googleapis.com", + // "quota_metric": "pubsub.googleapis.com/read_requests", + // "quota_limit": "ReadsPerMinutePerProject" + // } + // } + // + // Example of an ErrorInfo when the consumer "projects/123" checks quota on + // the service "dataflow.googleapis.com" and hits the organization quota + // limit "DefaultRequestsPerMinutePerOrganization" on the metric + // "dataflow.googleapis.com/default_requests". + // + // { "reason": "RATE_LIMIT_EXCEEDED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "dataflow.googleapis.com", + // "quota_metric": "dataflow.googleapis.com/default_requests", + // "quota_limit": "DefaultRequestsPerMinutePerOrganization" + // } + // } + RATE_LIMIT_EXCEEDED = 5; + + // The request is denied because there is not enough resource quota for the + // consumer. 
+ // + // Example of an ErrorInfo when the consumer "projects/123" fails to contact + // "compute.googleapis.com" service because consumer's resource quota usage + // has reached the maximum value set for the quota limit "VMsPerProject" + // on the quota metric "compute.googleapis.com/vms": + // + // { "reason": "RESOURCE_QUOTA_EXCEEDED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "compute.googleapis.com", + // "quota_metric": "compute.googleapis.com/vms", + // "quota_limit": "VMsPerProject" + // } + // } + // + // Example of an ErrorInfo when the consumer "projects/123" checks resource + // quota on the service "dataflow.googleapis.com" and hits the organization + // quota limit "jobs-per-organization" on the metric + // "dataflow.googleapis.com/job_count". + // + // { "reason": "RESOURCE_QUOTA_EXCEEDED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "dataflow.googleapis.com", + // "quota_metric": "dataflow.googleapis.com/job_count", + // "quota_limit": "jobs-per-organization" + // } + // } + RESOURCE_QUOTA_EXCEEDED = 6; + + // The request whose associated billing account address is in a tax restricted + // location, violates the local tax restrictions when creating resources in + // the restricted region. + // + // Example of an ErrorInfo when creating the Cloud Storage Bucket in the + // container "projects/123" under a tax restricted region + // "locations/asia-northeast3": + // + // { "reason": "LOCATION_TAX_POLICY_VIOLATED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "storage.googleapis.com", + // "location": "locations/asia-northeast3" + // } + // } + // + // This response indicates creating the Cloud Storage Bucket in + // "locations/asia-northeast3" violates the location tax restriction. 
+ LOCATION_TAX_POLICY_VIOLATED = 10; + + // The request is denied because the caller does not have required permission + // on the user project "projects/123" or the user project is invalid. For more + // information, check the [userProject System + // Parameters](https://cloud.google.com/apis/docs/system-parameters). + // + // Example of an ErrorInfo when the caller is calling Cloud Storage service + // with insufficient permissions on the user project: + // + // { "reason": "USER_PROJECT_DENIED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "storage.googleapis.com" + // } + // } + USER_PROJECT_DENIED = 11; + + // The request is denied because the consumer "projects/123" is suspended due + // to Terms of Service(Tos) violations. Check [Project suspension + // guidelines](https://cloud.google.com/resource-manager/docs/project-suspension-guidelines) + // for more information. + // + // Example of an ErrorInfo when calling Cloud Storage service with the + // suspended consumer "projects/123": + // + // { "reason": "CONSUMER_SUSPENDED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "storage.googleapis.com" + // } + // } + CONSUMER_SUSPENDED = 12; + + // The request is denied because the associated consumer is invalid. It may be + // in a bad format, cannot be found, or have been deleted. + // + // Example of an ErrorInfo when calling Cloud Storage service with the + // invalid consumer "projects/123": + // + // { "reason": "CONSUMER_INVALID", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "storage.googleapis.com" + // } + // } + CONSUMER_INVALID = 14; + + // The request is denied because it violates [VPC Service + // Controls](https://cloud.google.com/vpc-service-controls/docs/overview). 
+ // The 'uid' field is a random generated identifier that customer can use it + // to search the audit log for a request rejected by VPC Service Controls. For + // more information, please refer [VPC Service Controls + // Troubleshooting](https://cloud.google.com/vpc-service-controls/docs/troubleshooting#unique-id) + // + // Example of an ErrorInfo when the consumer "projects/123" fails to call + // Cloud Storage service because the request is prohibited by the VPC Service + // Controls. + // + // { "reason": "SECURITY_POLICY_VIOLATED", + // "domain": "googleapis.com", + // "metadata": { + // "uid": "123456789abcde", + // "consumer": "projects/123", + // "service": "storage.googleapis.com" + // } + // } + SECURITY_POLICY_VIOLATED = 15; + + // The request is denied because the provided access token has expired. + // + // Example of an ErrorInfo when the request is calling Cloud Storage service + // with an expired access token: + // + // { "reason": "ACCESS_TOKEN_EXPIRED", + // "domain": "googleapis.com", + // "metadata": { + // "service": "storage.googleapis.com", + // "method": "google.storage.v1.Storage.GetObject" + // } + // } + ACCESS_TOKEN_EXPIRED = 16; + + // The request is denied because the provided access token doesn't have at + // least one of the acceptable scopes required for the API. Please check + // [OAuth 2.0 Scopes for Google + // APIs](https://developers.google.com/identity/protocols/oauth2/scopes) for + // the list of the OAuth 2.0 scopes that you might need to request to access + // the API. 
+ // + // Example of an ErrorInfo when the request is calling Cloud Storage service + // with an access token that is missing required scopes: + // + // { "reason": "ACCESS_TOKEN_SCOPE_INSUFFICIENT", + // "domain": "googleapis.com", + // "metadata": { + // "service": "storage.googleapis.com", + // "method": "google.storage.v1.Storage.GetObject" + // } + // } + ACCESS_TOKEN_SCOPE_INSUFFICIENT = 17; + + // The request is denied because the account associated with the provided + // access token is in an invalid state, such as disabled or deleted. + // For more information, see https://cloud.google.com/docs/authentication. + // + // Warning: For privacy reasons, the server may not be able to disclose the + // email address for some accounts. The client MUST NOT depend on the + // availability of the `email` attribute. + // + // Example of an ErrorInfo when the request is to the Cloud Storage API with + // an access token that is associated with a disabled or deleted [service + // account](http://cloud/iam/docs/service-accounts): + // + // { "reason": "ACCOUNT_STATE_INVALID", + // "domain": "googleapis.com", + // "metadata": { + // "service": "storage.googleapis.com", + // "method": "google.storage.v1.Storage.GetObject", + // "email": "user@123.iam.gserviceaccount.com" + // } + // } + ACCOUNT_STATE_INVALID = 18; + + // The request is denied because the type of the provided access token is not + // supported by the API being called. + // + // Example of an ErrorInfo when the request is to the Cloud Storage API with + // an unsupported token type. + // + // { "reason": "ACCESS_TOKEN_TYPE_UNSUPPORTED", + // "domain": "googleapis.com", + // "metadata": { + // "service": "storage.googleapis.com", + // "method": "google.storage.v1.Storage.GetObject" + // } + // } + ACCESS_TOKEN_TYPE_UNSUPPORTED = 19; + + // The request is denied because the request doesn't have any authentication + // credentials. 
For more information regarding the supported authentication + // strategies for Google Cloud APIs, see + // https://cloud.google.com/docs/authentication. + // + // Example of an ErrorInfo when the request is to the Cloud Storage API + // without any authentication credentials. + // + // { "reason": "CREDENTIALS_MISSING", + // "domain": "googleapis.com", + // "metadata": { + // "service": "storage.googleapis.com", + // "method": "google.storage.v1.Storage.GetObject" + // } + // } + CREDENTIALS_MISSING = 20; + + // The request is denied because the provided project owning the resource + // which acts as the [API + // consumer](https://cloud.google.com/apis/design/glossary#api_consumer) is + // invalid. It may be in a bad format or empty. + // + // Example of an ErrorInfo when the request is to the Cloud Functions API, + // but the offered resource project in the request in a bad format which can't + // perform the ListFunctions method. + // + // { "reason": "RESOURCE_PROJECT_INVALID", + // "domain": "googleapis.com", + // "metadata": { + // "service": "cloudfunctions.googleapis.com", + // "method": + // "google.cloud.functions.v1.CloudFunctionsService.ListFunctions" + // } + // } + RESOURCE_PROJECT_INVALID = 21; + + // The request is denied because the provided session cookie is missing, + // invalid or failed to decode. + // + // Example of an ErrorInfo when the request is calling Cloud Storage service + // with a SID cookie which can't be decoded. + // + // { "reason": "SESSION_COOKIE_INVALID", + // "domain": "googleapis.com", + // "metadata": { + // "service": "storage.googleapis.com", + // "method": "google.storage.v1.Storage.GetObject", + // "cookie": "SID" + // } + // } + SESSION_COOKIE_INVALID = 23; + + // The request is denied because the user is from a Google Workspace customer + // that blocks their users from accessing a particular service. 
+ // + // Example scenario: https://support.google.com/a/answer/9197205?hl=en + // + // Example of an ErrorInfo when access to Google Cloud Storage service is + // blocked by the Google Workspace administrator: + // + // { "reason": "USER_BLOCKED_BY_ADMIN", + // "domain": "googleapis.com", + // "metadata": { + // "service": "storage.googleapis.com", + // "method": "google.storage.v1.Storage.GetObject", + // } + // } + USER_BLOCKED_BY_ADMIN = 24; + + // The request is denied because the resource service usage is restricted + // by administrators according to the organization policy constraint. + // For more information see + // https://cloud.google.com/resource-manager/docs/organization-policy/restricting-services. + // + // Example of an ErrorInfo when access to Google Cloud Storage service is + // restricted by Resource Usage Restriction policy: + // + // { "reason": "RESOURCE_USAGE_RESTRICTION_VIOLATED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/project-123", + // "service": "storage.googleapis.com" + // } + // } + RESOURCE_USAGE_RESTRICTION_VIOLATED = 25; + + // Unimplemented. Do not use. + // + // The request is denied because it contains unsupported system parameters in + // URL query parameters or HTTP headers. For more information, + // see https://cloud.google.com/apis/docs/system-parameters + // + // Example of an ErrorInfo when access "pubsub.googleapis.com" service with + // a request header of "x-goog-user-ip": + // + // { "reason": "SYSTEM_PARAMETER_UNSUPPORTED", + // "domain": "googleapis.com", + // "metadata": { + // "service": "pubsub.googleapis.com" + // "parameter": "x-goog-user-ip" + // } + // } + SYSTEM_PARAMETER_UNSUPPORTED = 26; + + // The request is denied because it violates Org Restriction: the requested + // resource does not belong to allowed organizations specified in + // "X-Goog-Allowed-Resources" header. 
+ // + // Example of an ErrorInfo when accessing a GCP resource that is restricted by + // Org Restriction for "pubsub.googleapis.com" service. + // + // { + // reason: "ORG_RESTRICTION_VIOLATION" + // domain: "googleapis.com" + // metadata { + // "consumer":"projects/123456" + // "service": "pubsub.googleapis.com" + // } + // } + ORG_RESTRICTION_VIOLATION = 27; + + // The request is denied because "X-Goog-Allowed-Resources" header is in a bad + // format. + // + // Example of an ErrorInfo when + // accessing "pubsub.googleapis.com" service with an invalid + // "X-Goog-Allowed-Resources" request header. + // + // { + // reason: "ORG_RESTRICTION_HEADER_INVALID" + // domain: "googleapis.com" + // metadata { + // "consumer":"projects/123456" + // "service": "pubsub.googleapis.com" + // } + // } + ORG_RESTRICTION_HEADER_INVALID = 28; + + // Unimplemented. Do not use. + // + // The request is calling a service that is not visible to the consumer. + // + // Example of an ErrorInfo when the consumer "projects/123" contacting + // "pubsub.googleapis.com" service which is not visible to the consumer. + // + // { "reason": "SERVICE_NOT_VISIBLE", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "pubsub.googleapis.com" + // } + // } + // + // This response indicates the "pubsub.googleapis.com" is not visible to + // "projects/123" (or it may not exist). + SERVICE_NOT_VISIBLE = 29; + + // The request is related to a project for which GCP access is suspended. + // + // Example of an ErrorInfo when the consumer "projects/123" fails to contact + // "pubsub.googleapis.com" service because GCP access is suspended: + // + // { "reason": "GCP_SUSPENDED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "pubsub.googleapis.com" + // } + // } + // + // This response indicates the associated GCP account has been suspended. 
+ GCP_SUSPENDED = 30; + + // The request violates the location policies when creating resources in + // the restricted region. + // + // Example of an ErrorInfo when creating the Cloud Storage Bucket by + // "projects/123" for service storage.googleapis.com: + // + // { "reason": "LOCATION_POLICY_VIOLATED", + // "domain": "googleapis.com", + // "metadata": { + // "consumer": "projects/123", + // "service": "storage.googleapis.com", + // } + // } + // + // This response indicates creating the Cloud Storage Bucket in + // "locations/asia-northeast3" violates at least one location policy. + // The troubleshooting guidance is provided in the Help links. + LOCATION_POLICY_VIOLATED = 31; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/field_behavior.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/field_behavior.proto new file mode 100644 index 000000000000..21895bf55278 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/field_behavior.proto @@ -0,0 +1,104 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "FieldBehaviorProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.FieldOptions { + // A designation of a specific field behavior (required, output only, etc.) + // in protobuf messages. + // + // Examples: + // + // string name = 1 [(google.api.field_behavior) = REQUIRED]; + // State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + // google.protobuf.Duration ttl = 1 + // [(google.api.field_behavior) = INPUT_ONLY]; + // google.protobuf.Timestamp expire_time = 1 + // [(google.api.field_behavior) = OUTPUT_ONLY, + // (google.api.field_behavior) = IMMUTABLE]; + repeated google.api.FieldBehavior field_behavior = 1052 [packed = false]; +} + +// An indicator of the behavior of a given field (for example, that a field +// is required in requests, or given as output but ignored as input). +// This **does not** change the behavior in protocol buffers itself; it only +// denotes the behavior and may affect how API tooling handles the field. +// +// Note: This enum **may** receive new values in the future. +enum FieldBehavior { + // Conventional default for enums. Do not use this. + FIELD_BEHAVIOR_UNSPECIFIED = 0; + + // Specifically denotes a field as optional. + // While all fields in protocol buffers are optional, this may be specified + // for emphasis if appropriate. + OPTIONAL = 1; + + // Denotes a field as required. + // This indicates that the field **must** be provided as part of the request, + // and failure to do so will cause an error (usually `INVALID_ARGUMENT`). + REQUIRED = 2; + + // Denotes a field as output only. 
+ // This indicates that the field is provided in responses, but including the + // field in a request does nothing (the server *must* ignore it and + // *must not* throw an error as a result of the field's presence). + OUTPUT_ONLY = 3; + + // Denotes a field as input only. + // This indicates that the field is provided in requests, and the + // corresponding field is not included in output. + INPUT_ONLY = 4; + + // Denotes a field as immutable. + // This indicates that the field may be set once in a request to create a + // resource, but may not be changed thereafter. + IMMUTABLE = 5; + + // Denotes that a (repeated) field is an unordered list. + // This indicates that the service may provide the elements of the list + // in any arbitrary order, rather than the order the user originally + // provided. Additionally, the list's order may or may not be stable. + UNORDERED_LIST = 6; + + // Denotes that this field returns a non-empty default value if not set. + // This indicates that if the user provides the empty value in a request, + // a non-empty value will be returned. The user will not be aware of what + // non-empty value to expect. + NON_EMPTY_DEFAULT = 7; + + // Denotes that the field in a resource (a message annotated with + // google.api.resource) is used in the resource name to uniquely identify the + // resource. For AIP-compliant APIs, this should only be applied to the + // `name` field on the resource. + // + // This behavior should not be applied to references to other resources within + // the message. + // + // The identifier field of resources often have different field behavior + // depending on the request it is embedded in (e.g. for Create methods name + // is optional and unused, while for Update methods it is required). Instead + // of method-specific annotations, only `IDENTIFIER` is required. 
+ IDENTIFIER = 8; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/field_info.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/field_info.proto new file mode 100644 index 000000000000..e62d84579d41 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/field_info.proto @@ -0,0 +1,79 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "FieldInfoProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.FieldOptions { + // Rich semantic descriptor of an API field beyond the basic typing. + // + // Examples: + // + // string request_id = 1 [(google.api.field_info).format = UUID4]; + // string old_ip_address = 2 [(google.api.field_info).format = IPV4]; + // string new_ip_address = 3 [(google.api.field_info).format = IPV6]; + // string actual_ip_address = 4 [ + // (google.api.field_info).format = IPV4_OR_IPV6 + // ]; + google.api.FieldInfo field_info = 291403980; +} + +// Rich semantic information of an API field beyond basic typing. 
+message FieldInfo { + // The standard format of a field value. The supported formats are all backed + // by either an RFC defined by the IETF or a Google-defined AIP. + enum Format { + // Default, unspecified value. + FORMAT_UNSPECIFIED = 0; + + // Universally Unique Identifier, version 4, value as defined by + // https://datatracker.ietf.org/doc/html/rfc4122. The value may be + // normalized to entirely lowercase letters. For example, the value + // `F47AC10B-58CC-0372-8567-0E02B2C3D479` would be normalized to + // `f47ac10b-58cc-0372-8567-0e02b2c3d479`. + UUID4 = 1; + + // Internet Protocol v4 value as defined by [RFC + // 791](https://datatracker.ietf.org/doc/html/rfc791). The value may be + // condensed, with leading zeros in each octet stripped. For example, + // `001.022.233.040` would be condensed to `1.22.233.40`. + IPV4 = 2; + + // Internet Protocol v6 value as defined by [RFC + // 2460](https://datatracker.ietf.org/doc/html/rfc2460). The value may be + // normalized to entirely lowercase letters with zeros compressed, following + // [RFC 5952](https://datatracker.ietf.org/doc/html/rfc5952). For example, + // the value `2001:0DB8:0::0` would be normalized to `2001:db8::`. + IPV6 = 3; + + // An IP address in either v4 or v6 format as described by the individual + // values defined herein. See the comments on the IPV4 and IPV6 types for + // allowed normalizations of each. + IPV4_OR_IPV6 = 4; + } + + // The standard format of a field value. This does not explicitly configure + // any API consumer, just documents the API's format for the field it is + // applied to. 
+ Format format = 1; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/http.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/http.proto new file mode 100644 index 000000000000..31d867a27d5a --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/http.proto @@ -0,0 +1,379 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "HttpProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines the HTTP configuration for an API service. It contains a list of +// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +// to one or more HTTP REST API methods. +message Http { + // A list of HTTP configuration rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated HttpRule rules = 1; + + // When set to true, URL path parameters will be fully URI-decoded except in + // cases of single segment matches in reserved expansion, where "%2F" will be + // left encoded. 
+ // + // The default behavior is to not decode RFC 6570 reserved characters in multi + // segment matches. + bool fully_decode_reserved_expansion = 2; +} + +// # gRPC Transcoding +// +// gRPC Transcoding is a feature for mapping between a gRPC method and one or +// more HTTP REST endpoints. It allows developers to build a single API service +// that supports both gRPC APIs and REST APIs. Many systems, including [Google +// APIs](https://github.com/googleapis/googleapis), +// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC +// Gateway](https://github.com/grpc-ecosystem/grpc-gateway), +// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature +// and use it for large scale production services. +// +// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies +// how different portions of the gRPC request message are mapped to the URL +// path, URL query parameters, and HTTP request body. It also controls how the +// gRPC response message is mapped to the HTTP response body. `HttpRule` is +// typically specified as an `google.api.http` annotation on the gRPC method. +// +// Each mapping specifies a URL path template and an HTTP method. The path +// template may refer to one or more fields in the gRPC request message, as long +// as each field is a non-repeated field with a primitive (non-message) type. +// The path template controls how fields of the request message are mapped to +// the URL path. +// +// Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/{name=messages/*}" +// }; +// } +// } +// message GetMessageRequest { +// string name = 1; // Mapped to URL path. +// } +// message Message { +// string text = 1; // The resource content. 
+// } +// +// This enables an HTTP REST to gRPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` +// +// Any fields in the request message which are not bound by the path template +// automatically become HTTP query parameters if there is no HTTP request body. +// For example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get:"/v1/messages/{message_id}" +// }; +// } +// } +// message GetMessageRequest { +// message SubMessage { +// string subfield = 1; +// } +// string message_id = 1; // Mapped to URL path. +// int64 revision = 2; // Mapped to URL query parameter `revision`. +// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. +// } +// +// This enables a HTTP JSON to RPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456?revision=2&sub.subfield=foo` | +// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: +// "foo"))` +// +// Note that fields which are mapped to URL query parameters must have a +// primitive type or a repeated primitive type or a non-repeated message type. +// In the case of a repeated type, the parameter can be repeated in the URL +// as `...?param=A¶m=B`. In the case of a message type, each field of the +// message is mapped to a separate parameter, such as +// `...?foo.a=A&foo.b=B&foo.c=C`. +// +// For HTTP methods that allow a request body, the `body` field +// specifies the mapping. 
Consider a REST update method on the +// message resource collection: +// +// service Messaging { +// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "message" +// }; +// } +// } +// message UpdateMessageRequest { +// string message_id = 1; // mapped to the URL +// Message message = 2; // mapped to the body +// } +// +// The following HTTP JSON to RPC mapping is enabled, where the +// representation of the JSON in the request body is determined by +// protos JSON encoding: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" message { text: "Hi!" })` +// +// The special name `*` can be used in the body mapping to define that +// every field not bound by the path template should be mapped to the +// request body. This enables the following alternative definition of +// the update method: +// +// service Messaging { +// rpc UpdateMessage(Message) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "*" +// }; +// } +// } +// message Message { +// string message_id = 1; +// string text = 2; +// } +// +// +// The following HTTP JSON to RPC mapping is enabled: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" text: "Hi!")` +// +// Note that when using `*` in the body mapping, it is not possible to +// have HTTP parameters, as all fields not bound by the path end in +// the body. This makes this option more rarely used in practice when +// defining REST APIs. The common usage of `*` is in custom methods +// which don't use the URL at all for transferring data. +// +// It is possible to define multiple HTTP methods for one RPC by using +// the `additional_bindings` option. 
Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/messages/{message_id}" +// additional_bindings { +// get: "/v1/users/{user_id}/messages/{message_id}" +// } +// }; +// } +// } +// message GetMessageRequest { +// string message_id = 1; +// string user_id = 2; +// } +// +// This enables the following two alternative HTTP JSON to RPC mappings: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` +// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: +// "123456")` +// +// ## Rules for HTTP mapping +// +// 1. Leaf request fields (recursive expansion nested messages in the request +// message) are classified into three categories: +// - Fields referred by the path template. They are passed via the URL path. +// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They +// are passed via the HTTP +// request body. +// - All other fields are passed via the URL query parameters, and the +// parameter name is the field path in the request message. A repeated +// field can be represented as multiple query parameters under the same +// name. +// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL +// query parameter, all fields +// are passed via URL path and HTTP request body. +// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP +// request body, all +// fields are passed via URL path and URL query parameters. +// +// ### Path template syntax +// +// Template = "/" Segments [ Verb ] ; +// Segments = Segment { "/" Segment } ; +// Segment = "*" | "**" | LITERAL | Variable ; +// Variable = "{" FieldPath [ "=" Segments ] "}" ; +// FieldPath = IDENT { "." IDENT } ; +// Verb = ":" LITERAL ; +// +// The syntax `*` matches a single URL path segment. 
The syntax `**` matches +// zero or more URL path segments, which must be the last part of the URL path +// except the `Verb`. +// +// The syntax `Variable` matches part of the URL path as specified by its +// template. A variable template must not contain other variables. If a variable +// matches a single path segment, its template may be omitted, e.g. `{var}` +// is equivalent to `{var=*}`. +// +// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +// contains any reserved character, such characters should be percent-encoded +// before the matching. +// +// If a variable contains exactly one path segment, such as `"{var}"` or +// `"{var=*}"`, when such a variable is expanded into a URL path on the client +// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The +// server side does the reverse decoding. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{var}`. +// +// If a variable contains multiple path segments, such as `"{var=foo/*}"` +// or `"{var=**}"`, when such a variable is expanded into a URL path on the +// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. +// The server side does the reverse decoding, except "%2F" and "%2f" are left +// unchanged. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{+var}`. +// +// ## Using gRPC API Service Configuration +// +// gRPC API Service Configuration (service config) is a configuration language +// for configuring a gRPC service to become a user-facing product. The +// service config is simply the YAML representation of the `google.api.Service` +// proto message. +// +// As an alternative to annotating your proto file, you can configure gRPC +// transcoding in your service config YAML files. 
You do this by specifying a +// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +// effect as the proto annotation. This can be particularly useful if you +// have a proto that is reused in multiple services. Note that any transcoding +// specified in the service config will override any matching transcoding +// configuration in the proto. +// +// Example: +// +// http: +// rules: +// # Selects a gRPC method and applies HttpRule to it. +// - selector: example.v1.Messaging.GetMessage +// get: /v1/messages/{message_id}/{sub.subfield} +// +// ## Special notes +// +// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +// proto to JSON conversion must follow the [proto3 +// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). +// +// While the single segment variable follows the semantics of +// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String +// Expansion, the multi segment variable **does not** follow RFC 6570 Section +// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +// does not expand special characters like `?` and `#`, which would lead +// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +// for multi segment variables. +// +// The path variables **must not** refer to any repeated or mapped field, +// because client libraries are not capable of handling such variable expansion. +// +// The path variables **must not** capture the leading "/" character. The reason +// is that the most common use case "{var}" does not capture the leading "/" +// character. For consistency, all path variables must share the same behavior. +// +// Repeated message fields must not be mapped to URL query parameters, because +// no client library can support such complicated mapping. +// +// If an API needs to use a JSON array for request or response body, it can map +// the request or response body to a repeated field. 
However, some gRPC +// Transcoding implementations may not support this feature. +message HttpRule { + // Selects a method to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + string selector = 1; + + // Determines the URL pattern is matched by this rules. This pattern can be + // used with any of the {get|put|post|delete|patch} methods. A custom method + // can be defined using the 'custom' field. + oneof pattern { + // Maps to HTTP GET. Used for listing and getting information about + // resources. + string get = 2; + + // Maps to HTTP PUT. Used for replacing a resource. + string put = 3; + + // Maps to HTTP POST. Used for creating a resource or performing an action. + string post = 4; + + // Maps to HTTP DELETE. Used for deleting a resource. + string delete = 5; + + // Maps to HTTP PATCH. Used for updating a resource. + string patch = 6; + + // The custom pattern is used for specifying an HTTP method that is not + // included in the `pattern` field, such as HEAD, or "*" to leave the + // HTTP method unspecified for this rule. The wild-card rule is useful + // for services that provide content to Web (HTML) clients. + CustomHttpPattern custom = 8; + } + + // The name of the request field whose value is mapped to the HTTP request + // body, or `*` for mapping all request fields not captured by the path + // pattern to the HTTP body, or omitted for not having any HTTP request body. + // + // NOTE: the referred field must be present at the top-level of the request + // message type. + string body = 7; + + // Optional. The name of the response field whose value is mapped to the HTTP + // response body. When omitted, the entire response message will be used + // as the HTTP response body. + // + // NOTE: The referred field must be present at the top-level of the response + // message type. + string response_body = 12; + + // Additional HTTP bindings for the selector. 
Nested bindings must + // not contain an `additional_bindings` field themselves (that is, + // the nesting may only be one level deep). + repeated HttpRule additional_bindings = 11; +} + +// A custom pattern is used for defining custom HTTP verb. +message CustomHttpPattern { + // The name of this custom HTTP verb. + string kind = 1; + + // The path matched by this custom verb. + string path = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/httpbody.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/httpbody.proto new file mode 100644 index 000000000000..7f1685e8089c --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/httpbody.proto @@ -0,0 +1,81 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/any.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/httpbody;httpbody"; +option java_multiple_files = true; +option java_outer_classname = "HttpBodyProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Message that represents an arbitrary HTTP body. It should only be used for +// payload formats that can't be represented as JSON, such as raw binary or +// an HTML page. 
+// +// +// This message can be used both in streaming and non-streaming API methods in +// the request as well as the response. +// +// It can be used as a top-level request field, which is convenient if one +// wants to extract parameters from either the URL or HTTP template into the +// request fields and also want access to the raw HTTP body. +// +// Example: +// +// message GetResourceRequest { +// // A unique request id. +// string request_id = 1; +// +// // The raw HTTP body is bound to this field. +// google.api.HttpBody http_body = 2; +// +// } +// +// service ResourceService { +// rpc GetResource(GetResourceRequest) +// returns (google.api.HttpBody); +// rpc UpdateResource(google.api.HttpBody) +// returns (google.protobuf.Empty); +// +// } +// +// Example with streaming methods: +// +// service CaldavService { +// rpc GetCalendar(stream google.api.HttpBody) +// returns (stream google.api.HttpBody); +// rpc UpdateCalendar(stream google.api.HttpBody) +// returns (stream google.api.HttpBody); +// +// } +// +// Use of this type only changes how the request and response bodies are +// handled, all other features will continue to work unchanged. +message HttpBody { + // The HTTP Content-Type header value specifying the content type of the body. + string content_type = 1; + + // The HTTP request/response body as raw binary. + bytes data = 2; + + // Application specific response metadata. Must be set in the first response + // for streaming APIs. 
+ repeated google.protobuf.Any extensions = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/label.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/label.proto new file mode 100644 index 000000000000..698f6bd4deed --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/label.proto @@ -0,0 +1,48 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/label;label"; +option java_multiple_files = true; +option java_outer_classname = "LabelProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// A description of a label. +message LabelDescriptor { + // Value types that can be used as label values. + enum ValueType { + // A variable-length string. This is the default. + STRING = 0; + + // Boolean; true or false. + BOOL = 1; + + // A 64-bit signed integer. + INT64 = 2; + } + + // The label key. + string key = 1; + + // The type of data that can be assigned to the label. + ValueType value_type = 2; + + // A human-readable description for the label. 
+ string description = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/launch_stage.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/launch_stage.proto new file mode 100644 index 000000000000..9802de79526a --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/launch_stage.proto @@ -0,0 +1,72 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api;api"; +option java_multiple_files = true; +option java_outer_classname = "LaunchStageProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// The launch stage as defined by [Google Cloud Platform +// Launch Stages](https://cloud.google.com/terms/launch-stages). +enum LaunchStage { + // Do not use this default value. + LAUNCH_STAGE_UNSPECIFIED = 0; + + // The feature is not yet implemented. Users can not use it. + UNIMPLEMENTED = 6; + + // Prelaunch features are hidden from users and are only visible internally. + PRELAUNCH = 7; + + // Early Access features are limited to a closed group of testers. To use + // these features, you must sign up in advance and sign a Trusted Tester + // agreement (which includes confidentiality provisions). 
These features may + // be unstable, changed in backward-incompatible ways, and are not + // guaranteed to be released. + EARLY_ACCESS = 1; + + // Alpha is a limited availability test for releases before they are cleared + // for widespread use. By Alpha, all significant design issues are resolved + // and we are in the process of verifying functionality. Alpha customers + // need to apply for access, agree to applicable terms, and have their + // projects allowlisted. Alpha releases don't have to be feature complete, + // no SLAs are provided, and there are no technical support obligations, but + // they will be far enough along that customers can actually use them in + // test environments or for limited-use tests -- just like they would in + // normal production cases. + ALPHA = 2; + + // Beta is the point at which we are ready to open a release for any + // customer to use. There are no SLA or technical support obligations in a + // Beta release. Products will be complete from a feature perspective, but + // may have some open outstanding issues. Beta releases are suitable for + // limited production use cases. + BETA = 3; + + // GA features are open to all developers and are considered stable and + // fully qualified for production use. + GA = 4; + + // Deprecated features are scheduled to be shut down and removed. For more + // information, see the "Deprecation Policy" section of our [Terms of + // Service](https://cloud.google.com/terms/) + // and the [Google Cloud Platform Subject to the Deprecation + // Policy](https://cloud.google.com/terms/deprecation) documentation. 
+ DEPRECATED = 5; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/log.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/log.proto new file mode 100644 index 000000000000..416c4f6c2f06 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/log.proto @@ -0,0 +1,54 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/label.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "LogProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// A description of a log type. Example in YAML format: +// +// - name: library.googleapis.com/activity_history +// description: The history of borrowing and returning library items. +// display_name: Activity +// labels: +// - key: /customer_id +// description: Identifier of a library customer +message LogDescriptor { + // The name of the log. It must be less than 512 characters long and can + // include the following characters: upper- and lower-case alphanumeric + // characters [A-Za-z0-9], and punctuation characters including + // slash, underscore, hyphen, period [/_-.]. 
+ string name = 1; + + // The set of labels that are available to describe a specific log entry. + // Runtime requests that contain labels not specified here are + // considered invalid. + repeated LabelDescriptor labels = 2; + + // A human-readable description of this log. This information appears in + // the documentation and can contain details. + string description = 3; + + // The human-readable name for this log. This information appears on + // the user interface and should be concise. + string display_name = 4; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/logging.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/logging.proto new file mode 100644 index 000000000000..650786fdedc5 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/logging.proto @@ -0,0 +1,81 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "LoggingProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Logging configuration of the service. 
+// +// The following example shows how to configure logs to be sent to the +// producer and consumer projects. In the example, the `activity_history` +// log is sent to both the producer and consumer projects, whereas the +// `purchase_history` log is only sent to the producer project. +// +// monitored_resources: +// - type: library.googleapis.com/branch +// labels: +// - key: /city +// description: The city where the library branch is located in. +// - key: /name +// description: The name of the branch. +// logs: +// - name: activity_history +// labels: +// - key: /customer_id +// - name: purchase_history +// logging: +// producer_destinations: +// - monitored_resource: library.googleapis.com/branch +// logs: +// - activity_history +// - purchase_history +// consumer_destinations: +// - monitored_resource: library.googleapis.com/branch +// logs: +// - activity_history +message Logging { + // Configuration of a specific logging destination (the producer project + // or the consumer project). + message LoggingDestination { + // The monitored resource type. The type must be defined in the + // [Service.monitored_resources][google.api.Service.monitored_resources] + // section. + string monitored_resource = 3; + + // Names of the logs to be sent to this destination. Each name must + // be defined in the [Service.logs][google.api.Service.logs] section. If the + // log name is not a domain scoped name, it will be automatically prefixed + // with the service name followed by "/". + repeated string logs = 1; + } + + // Logging configurations for sending logs to the producer project. + // There can be multiple producer destinations, each one must have a + // different monitored resource type. A log can be used in at most + // one producer destination. + repeated LoggingDestination producer_destinations = 1; + + // Logging configurations for sending logs to the consumer project. 
+ // There can be multiple consumer destinations, each one must have a + // different monitored resource type. A log can be used in at most + // one consumer destination. + repeated LoggingDestination consumer_destinations = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/metric.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/metric.proto new file mode 100644 index 000000000000..9bf043c493eb --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/metric.proto @@ -0,0 +1,268 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/label.proto"; +import "google/api/launch_stage.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/metric;metric"; +option java_multiple_files = true; +option java_outer_classname = "MetricProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines a metric type and its schema. Once a metric descriptor is created, +// deleting or altering it stops data collection and makes the metric type's +// existing data unusable. +// +message MetricDescriptor { + // The kind of measurement. It describes how the data is reported. 
+ // For information on setting the start time and end time based on + // the MetricKind, see [TimeInterval][google.monitoring.v3.TimeInterval]. + enum MetricKind { + // Do not use this default value. + METRIC_KIND_UNSPECIFIED = 0; + + // An instantaneous measurement of a value. + GAUGE = 1; + + // The change in a value during a time interval. + DELTA = 2; + + // A value accumulated over a time interval. Cumulative + // measurements in a time series should have the same start time + // and increasing end times, until an event resets the cumulative + // value to zero and sets a new start time for the following + // points. + CUMULATIVE = 3; + } + + // The value type of a metric. + enum ValueType { + // Do not use this default value. + VALUE_TYPE_UNSPECIFIED = 0; + + // The value is a boolean. + // This value type can be used only if the metric kind is `GAUGE`. + BOOL = 1; + + // The value is a signed 64-bit integer. + INT64 = 2; + + // The value is a double precision floating point number. + DOUBLE = 3; + + // The value is a text string. + // This value type can be used only if the metric kind is `GAUGE`. + STRING = 4; + + // The value is a [`Distribution`][google.api.Distribution]. + DISTRIBUTION = 5; + + // The value is money. + MONEY = 6; + } + + // Additional annotations that can be used to guide the usage of a metric. + message MetricDescriptorMetadata { + // Deprecated. Must use the + // [MetricDescriptor.launch_stage][google.api.MetricDescriptor.launch_stage] + // instead. + LaunchStage launch_stage = 1 [deprecated = true]; + + // The sampling period of metric data points. For metrics which are written + // periodically, consecutive data points are stored at this time interval, + // excluding data loss due to errors. Metrics with a higher granularity have + // a smaller sampling period. + google.protobuf.Duration sample_period = 2; + + // The delay of data points caused by ingestion. 
Data points older than this + // age are guaranteed to be ingested and available to be read, excluding + // data loss due to errors. + google.protobuf.Duration ingest_delay = 3; + } + + // The resource name of the metric descriptor. + string name = 1; + + // The metric type, including its DNS name prefix. The type is not + // URL-encoded. All user-defined metric types have the DNS name + // `custom.googleapis.com` or `external.googleapis.com`. Metric types should + // use a natural hierarchical grouping. For example: + // + // "custom.googleapis.com/invoice/paid/amount" + // "external.googleapis.com/prometheus/up" + // "appengine.googleapis.com/http/server/response_latencies" + string type = 8; + + // The set of labels that can be used to describe a specific + // instance of this metric type. For example, the + // `appengine.googleapis.com/http/server/response_latencies` metric + // type has a label for the HTTP response code, `response_code`, so + // you can look at latencies for successful responses or just + // for responses that failed. + repeated LabelDescriptor labels = 2; + + // Whether the metric records instantaneous values, changes to a value, etc. + // Some combinations of `metric_kind` and `value_type` might not be supported. + MetricKind metric_kind = 3; + + // Whether the measurement is an integer, a floating-point number, etc. + // Some combinations of `metric_kind` and `value_type` might not be supported. + ValueType value_type = 4; + + // The units in which the metric value is reported. It is only applicable + // if the `value_type` is `INT64`, `DOUBLE`, or `DISTRIBUTION`. The `unit` + // defines the representation of the stored metric values. + // + // Different systems might scale the values to be more easily displayed (so a + // value of `0.02kBy` _might_ be displayed as `20By`, and a value of + // `3523kBy` _might_ be displayed as `3.5MBy`). 
However, if the `unit` is + // `kBy`, then the value of the metric is always in thousands of bytes, no + // matter how it might be displayed. + // + // If you want a custom metric to record the exact number of CPU-seconds used + // by a job, you can create an `INT64 CUMULATIVE` metric whose `unit` is + // `s{CPU}` (or equivalently `1s{CPU}` or just `s`). If the job uses 12,005 + // CPU-seconds, then the value is written as `12005`. + // + // Alternatively, if you want a custom metric to record data in a more + // granular way, you can create a `DOUBLE CUMULATIVE` metric whose `unit` is + // `ks{CPU}`, and then write the value `12.005` (which is `12005/1000`), + // or use `Kis{CPU}` and write `11.723` (which is `12005/1024`). + // + // The supported units are a subset of [The Unified Code for Units of + // Measure](https://unitsofmeasure.org/ucum.html) standard: + // + // **Basic units (UNIT)** + // + // * `bit` bit + // * `By` byte + // * `s` second + // * `min` minute + // * `h` hour + // * `d` day + // * `1` dimensionless + // + // **Prefixes (PREFIX)** + // + // * `k` kilo (10^3) + // * `M` mega (10^6) + // * `G` giga (10^9) + // * `T` tera (10^12) + // * `P` peta (10^15) + // * `E` exa (10^18) + // * `Z` zetta (10^21) + // * `Y` yotta (10^24) + // + // * `m` milli (10^-3) + // * `u` micro (10^-6) + // * `n` nano (10^-9) + // * `p` pico (10^-12) + // * `f` femto (10^-15) + // * `a` atto (10^-18) + // * `z` zepto (10^-21) + // * `y` yocto (10^-24) + // + // * `Ki` kibi (2^10) + // * `Mi` mebi (2^20) + // * `Gi` gibi (2^30) + // * `Ti` tebi (2^40) + // * `Pi` pebi (2^50) + // + // **Grammar** + // + // The grammar also includes these connectors: + // + // * `/` division or ratio (as an infix operator). For examples, + // `kBy/{email}` or `MiBy/10ms` (although you should almost never + // have `/s` in a metric `unit`; rates should always be computed at + // query time from the underlying cumulative or delta value). 
+ // * `.` multiplication or composition (as an infix operator). For + // examples, `GBy.d` or `k{watt}.h`. + // + // The grammar for a unit is as follows: + // + // Expression = Component { "." Component } { "/" Component } ; + // + // Component = ( [ PREFIX ] UNIT | "%" ) [ Annotation ] + // | Annotation + // | "1" + // ; + // + // Annotation = "{" NAME "}" ; + // + // Notes: + // + // * `Annotation` is just a comment if it follows a `UNIT`. If the annotation + // is used alone, then the unit is equivalent to `1`. For examples, + // `{request}/s == 1/s`, `By{transmitted}/s == By/s`. + // * `NAME` is a sequence of non-blank printable ASCII characters not + // containing `{` or `}`. + // * `1` represents a unitary [dimensionless + // unit](https://en.wikipedia.org/wiki/Dimensionless_quantity) of 1, such + // as in `1/s`. It is typically used when none of the basic units are + // appropriate. For example, "new users per day" can be represented as + // `1/d` or `{new-users}/d` (and a metric value `5` would mean "5 new + // users). Alternatively, "thousands of page views per day" would be + // represented as `1000/d` or `k1/d` or `k{page_views}/d` (and a metric + // value of `5.3` would mean "5300 page views per day"). + // * `%` represents dimensionless value of 1/100, and annotates values giving + // a percentage (so the metric values are typically in the range of 0..100, + // and a metric value `3` means "3 percent"). + // * `10^2.%` indicates a metric contains a ratio, typically in the range + // 0..1, that will be multiplied by 100 and displayed as a percentage + // (so a metric value `0.03` means "3 percent"). + string unit = 5; + + // A detailed description of the metric, which can be used in documentation. + string description = 6; + + // A concise name for the metric, which can be displayed in user interfaces. + // Use sentence case without an ending period, for example "Request count". 
+ // This field is optional but it is recommended to be set for any metrics + // associated with user-visible concepts, such as Quota. + string display_name = 7; + + // Optional. Metadata which can be used to guide usage of the metric. + MetricDescriptorMetadata metadata = 10; + + // Optional. The launch stage of the metric definition. + LaunchStage launch_stage = 12; + + // Read-only. If present, then a [time + // series][google.monitoring.v3.TimeSeries], which is identified partially by + // a metric type and a + // [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor], that + // is associated with this metric type can only be associated with one of the + // monitored resource types listed here. + repeated string monitored_resource_types = 13; +} + +// A specific metric, identified by specifying values for all of the +// labels of a [`MetricDescriptor`][google.api.MetricDescriptor]. +message Metric { + // An existing metric type, see + // [google.api.MetricDescriptor][google.api.MetricDescriptor]. For example, + // `custom.googleapis.com/invoice/paid/amount`. + string type = 3; + + // The set of label values that uniquely identify this metric. All + // labels listed in the `MetricDescriptor` must be assigned values. + map labels = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/monitored_resource.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/monitored_resource.proto new file mode 100644 index 000000000000..08bc39b1c230 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/monitored_resource.proto @@ -0,0 +1,130 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/label.proto"; +import "google/api/launch_stage.proto"; +import "google/protobuf/struct.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/monitoredres;monitoredres"; +option java_multiple_files = true; +option java_outer_classname = "MonitoredResourceProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// An object that describes the schema of a +// [MonitoredResource][google.api.MonitoredResource] object using a type name +// and a set of labels. For example, the monitored resource descriptor for +// Google Compute Engine VM instances has a type of +// `"gce_instance"` and specifies the use of the labels `"instance_id"` and +// `"zone"` to identify particular VM instances. +// +// Different APIs can support different monitored resource types. APIs generally +// provide a `list` method that returns the monitored resource descriptors used +// by the API. +// +message MonitoredResourceDescriptor { + // Optional. The resource name of the monitored resource descriptor: + // `"projects/{project_id}/monitoredResourceDescriptors/{type}"` where + // {type} is the value of the `type` field in this object and + // {project_id} is a project ID that provides API-specific context for + // accessing the type. APIs that do not use project information can use the + // resource name format `"monitoredResourceDescriptors/{type}"`. + string name = 5; + + // Required. The monitored resource type. 
For example, the type + // `"cloudsql_database"` represents databases in Google Cloud SQL. + // For a list of types, see [Monitored resource + // types](https://cloud.google.com/monitoring/api/resources) + // and [Logging resource + // types](https://cloud.google.com/logging/docs/api/v2/resource-list). + string type = 1; + + // Optional. A concise name for the monitored resource type that might be + // displayed in user interfaces. It should be a Title Cased Noun Phrase, + // without any article or other determiners. For example, + // `"Google Cloud SQL Database"`. + string display_name = 2; + + // Optional. A detailed description of the monitored resource type that might + // be used in documentation. + string description = 3; + + // Required. A set of labels used to describe instances of this monitored + // resource type. For example, an individual Google Cloud SQL database is + // identified by values for the labels `"database_id"` and `"zone"`. + repeated LabelDescriptor labels = 4; + + // Optional. The launch stage of the monitored resource definition. + LaunchStage launch_stage = 7; +} + +// An object representing a resource that can be used for monitoring, logging, +// billing, or other purposes. Examples include virtual machine instances, +// databases, and storage devices such as disks. The `type` field identifies a +// [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] object +// that describes the resource's schema. Information in the `labels` field +// identifies the actual resource and its attributes according to the schema. 
+// For example, a particular Compute Engine VM instance could be represented by +// the following object, because the +// [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] for +// `"gce_instance"` has labels +// `"project_id"`, `"instance_id"` and `"zone"`: +// +// { "type": "gce_instance", +// "labels": { "project_id": "my-project", +// "instance_id": "12345678901234", +// "zone": "us-central1-a" }} +message MonitoredResource { + // Required. The monitored resource type. This field must match + // the `type` field of a + // [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] + // object. For example, the type of a Compute Engine VM instance is + // `gce_instance`. Some descriptors include the service name in the type; for + // example, the type of a Datastream stream is + // `datastream.googleapis.com/Stream`. + string type = 1; + + // Required. Values for all of the labels listed in the associated monitored + // resource descriptor. For example, Compute Engine VM instances use the + // labels `"project_id"`, `"instance_id"`, and `"zone"`. + map labels = 2; +} + +// Auxiliary metadata for a [MonitoredResource][google.api.MonitoredResource] +// object. [MonitoredResource][google.api.MonitoredResource] objects contain the +// minimum set of information to uniquely identify a monitored resource +// instance. There is some other useful auxiliary metadata. Monitoring and +// Logging use an ingestion pipeline to extract metadata for cloud resources of +// all types, and store the metadata in this message. +message MonitoredResourceMetadata { + // Output only. Values for predefined system metadata labels. + // System labels are a kind of metadata extracted by Google, including + // "machine_image", "vpc", "subnet_id", + // "security_group", "name", etc. + // System label values can be only strings, Boolean values, or a list of + // strings. 
For example: + // + // { "name": "my-test-instance", + // "security_group": ["a", "b", "c"], + // "spot_instance": false } + google.protobuf.Struct system_labels = 1; + + // Output only. A map of user-defined metadata labels. + map user_labels = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/monitoring.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/monitoring.proto new file mode 100644 index 000000000000..753703e54501 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/monitoring.proto @@ -0,0 +1,107 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "MonitoringProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Monitoring configuration of the service. +// +// The example below shows how to configure monitored resources and metrics +// for monitoring. In the example, a monitored resource and two metrics are +// defined. 
The `library.googleapis.com/book/returned_count` metric is sent +// to both producer and consumer projects, whereas the +// `library.googleapis.com/book/num_overdue` metric is only sent to the +// consumer project. +// +// monitored_resources: +// - type: library.googleapis.com/Branch +// display_name: "Library Branch" +// description: "A branch of a library." +// launch_stage: GA +// labels: +// - key: resource_container +// description: "The Cloud container (ie. project id) for the Branch." +// - key: location +// description: "The location of the library branch." +// - key: branch_id +// description: "The id of the branch." +// metrics: +// - name: library.googleapis.com/book/returned_count +// display_name: "Books Returned" +// description: "The count of books that have been returned." +// launch_stage: GA +// metric_kind: DELTA +// value_type: INT64 +// unit: "1" +// labels: +// - key: customer_id +// description: "The id of the customer." +// - name: library.googleapis.com/book/num_overdue +// display_name: "Books Overdue" +// description: "The current number of overdue books." +// launch_stage: GA +// metric_kind: GAUGE +// value_type: INT64 +// unit: "1" +// labels: +// - key: customer_id +// description: "The id of the customer." +// monitoring: +// producer_destinations: +// - monitored_resource: library.googleapis.com/Branch +// metrics: +// - library.googleapis.com/book/returned_count +// consumer_destinations: +// - monitored_resource: library.googleapis.com/Branch +// metrics: +// - library.googleapis.com/book/returned_count +// - library.googleapis.com/book/num_overdue +message Monitoring { + // Configuration of a specific monitoring destination (the producer project + // or the consumer project). + message MonitoringDestination { + // The monitored resource type. The type must be defined in + // [Service.monitored_resources][google.api.Service.monitored_resources] + // section. 
+ string monitored_resource = 1; + + // Types of the metrics to report to this monitoring destination. + // Each type must be defined in + // [Service.metrics][google.api.Service.metrics] section. + repeated string metrics = 2; + } + + // Monitoring configurations for sending metrics to the producer project. + // There can be multiple producer destinations. A monitored resource type may + // appear in multiple monitoring destinations if different aggregations are + // needed for different sets of metrics associated with that monitored + // resource type. A monitored resource and metric pair may only be used once + // in the Monitoring configuration. + repeated MonitoringDestination producer_destinations = 1; + + // Monitoring configurations for sending metrics to the consumer project. + // There can be multiple consumer destinations. A monitored resource type may + // appear in multiple monitoring destinations if different aggregations are + // needed for different sets of metrics associated with that monitored + // resource type. A monitored resource and metric pair may only be used once + // in the Monitoring configuration. + repeated MonitoringDestination consumer_destinations = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/policy.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/policy.proto new file mode 100644 index 000000000000..dd202bc87239 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/policy.proto @@ -0,0 +1,85 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "PolicyProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Provides `google.api.field_policy` annotation at proto fields. +extend google.protobuf.FieldOptions { + // See [FieldPolicy][]. + FieldPolicy field_policy = 158361448; +} + +// Provides `google.api.method_policy` annotation at proto methods. +extend google.protobuf.MethodOptions { + // See [MethodPolicy][]. + MethodPolicy method_policy = 161893301; +} + +// Google API Policy Annotation +// +// This message defines a simple API policy annotation that can be used to +// annotate API request and response message fields with applicable policies. +// One field may have multiple applicable policies that must all be satisfied +// before a request can be processed. This policy annotation is used to +// generate the overall policy that will be used for automatic runtime +// policy enforcement and documentation generation. +message FieldPolicy { + // Selects one or more request or response message fields to apply this + // `FieldPolicy`. + // + // When a `FieldPolicy` is used in proto annotation, the selector must + // be left as empty. The service config generator will automatically fill + // the correct value. 
+ // + // When a `FieldPolicy` is used in service config, the selector must be a + // comma-separated string with valid request or response field paths, + // such as "foo.bar" or "foo.bar,foo.baz". + string selector = 1; + + // Specifies the required permission(s) for the resource referred to by the + // field. It requires the field contains a valid resource reference, and + // the request must pass the permission checks to proceed. For example, + // "resourcemanager.projects.get". + string resource_permission = 2; + + // Specifies the resource type for the resource referred to by the field. + string resource_type = 3; +} + +// Defines policies applying to an RPC method. +message MethodPolicy { + // Selects a method to which these policies should be enforced, for example, + // "google.pubsub.v1.Subscriber.CreateSubscription". + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + // + // NOTE: This field must not be set in the proto annotation. It will be + // automatically filled by the service config compiler . + string selector = 9; + + // Policies that are applicable to the request message. + repeated FieldPolicy request_policies = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/quota.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/quota.proto new file mode 100644 index 000000000000..7ccc102fc72d --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/quota.proto @@ -0,0 +1,184 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "QuotaProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Quota configuration helps to achieve fairness and budgeting in service +// usage. +// +// The metric based quota configuration works this way: +// - The service configuration defines a set of metrics. +// - For API calls, the quota.metric_rules maps methods to metrics with +// corresponding costs. +// - The quota.limits defines limits on the metrics, which will be used for +// quota checks at runtime. +// +// An example quota configuration in yaml format: +// +// quota: +// limits: +// +// - name: apiWriteQpsPerProject +// metric: library.googleapis.com/write_calls +// unit: "1/min/{project}" # rate limit for consumer projects +// values: +// STANDARD: 10000 +// +// +// (The metric rules bind all methods to the read_calls metric, +// except for the UpdateBook and DeleteBook methods. These two methods +// are mapped to the write_calls metric, with the UpdateBook method +// consuming at twice rate as the DeleteBook method.) 
+// metric_rules: +// - selector: "*" +// metric_costs: +// library.googleapis.com/read_calls: 1 +// - selector: google.example.library.v1.LibraryService.UpdateBook +// metric_costs: +// library.googleapis.com/write_calls: 2 +// - selector: google.example.library.v1.LibraryService.DeleteBook +// metric_costs: +// library.googleapis.com/write_calls: 1 +// +// Corresponding Metric definition: +// +// metrics: +// - name: library.googleapis.com/read_calls +// display_name: Read requests +// metric_kind: DELTA +// value_type: INT64 +// +// - name: library.googleapis.com/write_calls +// display_name: Write requests +// metric_kind: DELTA +// value_type: INT64 +// +// +message Quota { + // List of QuotaLimit definitions for the service. + repeated QuotaLimit limits = 3; + + // List of MetricRule definitions, each one mapping a selected method to one + // or more metrics. + repeated MetricRule metric_rules = 4; +} + +// Bind API methods to metrics. Binding a method to a metric causes that +// metric's configured quota behaviors to apply to the method call. +message MetricRule { + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + string selector = 1; + + // Metrics to update when the selected methods are called, and the associated + // cost applied to each metric. + // + // The key of the map is the metric name, and the values are the amount + // increased for the metric against which the quota limits are defined. + // The value must not be negative. + map metric_costs = 2; +} + +// `QuotaLimit` defines a specific limit that applies over a specified duration +// for a limit type. There can be at most one limit for a duration and limit +// type combination defined within a `QuotaGroup`. +message QuotaLimit { + // Name of the quota limit. + // + // The name must be provided, and it must be unique within the service. 
The + // name can only include alphanumeric characters as well as '-'. + // + // The maximum length of the limit name is 64 characters. + string name = 6; + + // Optional. User-visible, extended description for this quota limit. + // Should be used only when more context is needed to understand this limit + // than provided by the limit's display name (see: `display_name`). + string description = 2; + + // Default number of tokens that can be consumed during the specified + // duration. This is the number of tokens assigned when a client + // application developer activates the service for his/her project. + // + // Specifying a value of 0 will block all requests. This can be used if you + // are provisioning quota to selected consumers and blocking others. + // Similarly, a value of -1 will indicate an unlimited quota. No other + // negative values are allowed. + // + // Used by group-based quotas only. + int64 default_limit = 3; + + // Maximum number of tokens that can be consumed during the specified + // duration. Client application developers can override the default limit up + // to this maximum. If specified, this value cannot be set to a value less + // than the default limit. If not specified, it is set to the default limit. + // + // To allow clients to apply overrides with no upper bound, set this to -1, + // indicating unlimited maximum quota. + // + // Used by group-based quotas only. + int64 max_limit = 4; + + // Free tier value displayed in the Developers Console for this limit. + // The free tier is the number of tokens that will be subtracted from the + // billed amount when billing is enabled. + // This field can only be set on a limit with duration "1d", in a billable + // group; it is invalid on any other limit. If this field is not set, it + // defaults to 0, indicating that there is no free tier for this service. + // + // Used by group-based quotas only. + int64 free_tier = 7; + + // Duration of this limit in textual notation. 
Must be "100s" or "1d". + // + // Used by group-based quotas only. + string duration = 5; + + // The name of the metric this quota limit applies to. The quota limits with + // the same metric will be checked together during runtime. The metric must be + // defined within the service config. + string metric = 8; + + // Specify the unit of the quota limit. It uses the same syntax as + // [Metric.unit][]. The supported unit kinds are determined by the quota + // backend system. + // + // Here are some examples: + // * "1/min/{project}" for quota per minute per project. + // + // Note: the order of unit components is insignificant. + // The "1" at the beginning is required to follow the metric unit syntax. + string unit = 9; + + // Tiered limit values. You must specify this as a key:value pair, with an + // integer value that is the maximum number of requests allowed for the + // specified unit. Currently only STANDARD is supported. + map values = 10; + + // User-visible display name for this limit. + // Optional. If not set, the UI will provide a default display name based on + // the quota configuration. This field can be used to override the default + // display name generated from the configuration. + string display_name = 12; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/resource.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/resource.proto new file mode 100644 index 000000000000..bf0cbec5debe --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/resource.proto @@ -0,0 +1,238 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "ResourceProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.FieldOptions { + // An annotation that describes a resource reference, see + // [ResourceReference][]. + google.api.ResourceReference resource_reference = 1055; +} + +extend google.protobuf.FileOptions { + // An annotation that describes a resource definition without a corresponding + // message; see [ResourceDescriptor][]. + repeated google.api.ResourceDescriptor resource_definition = 1053; +} + +extend google.protobuf.MessageOptions { + // An annotation that describes a resource definition, see + // [ResourceDescriptor][]. + google.api.ResourceDescriptor resource = 1053; +} + +// A simple descriptor of a resource type. +// +// ResourceDescriptor annotates a resource message (either by means of a +// protobuf annotation or use in the service config), and associates the +// resource's schema, the resource type, and the pattern of the resource name. +// +// Example: +// +// message Topic { +// // Indicates this message defines a resource schema. +// // Declares the resource type in the format of {service}/{kind}. +// // For Kubernetes resources, the format is {api group}/{kind}. 
+// option (google.api.resource) = { +// type: "pubsub.googleapis.com/Topic" +// pattern: "projects/{project}/topics/{topic}" +// }; +// } +// +// The ResourceDescriptor Yaml config will look like: +// +// resources: +// - type: "pubsub.googleapis.com/Topic" +// pattern: "projects/{project}/topics/{topic}" +// +// Sometimes, resources have multiple patterns, typically because they can +// live under multiple parents. +// +// Example: +// +// message LogEntry { +// option (google.api.resource) = { +// type: "logging.googleapis.com/LogEntry" +// pattern: "projects/{project}/logs/{log}" +// pattern: "folders/{folder}/logs/{log}" +// pattern: "organizations/{organization}/logs/{log}" +// pattern: "billingAccounts/{billing_account}/logs/{log}" +// }; +// } +// +// The ResourceDescriptor Yaml config will look like: +// +// resources: +// - type: 'logging.googleapis.com/LogEntry' +// pattern: "projects/{project}/logs/{log}" +// pattern: "folders/{folder}/logs/{log}" +// pattern: "organizations/{organization}/logs/{log}" +// pattern: "billingAccounts/{billing_account}/logs/{log}" +message ResourceDescriptor { + // A description of the historical or future-looking state of the + // resource pattern. + enum History { + // The "unset" value. + HISTORY_UNSPECIFIED = 0; + + // The resource originally had one pattern and launched as such, and + // additional patterns were added later. + ORIGINALLY_SINGLE_PATTERN = 1; + + // The resource has one pattern, but the API owner expects to add more + // later. (This is the inverse of ORIGINALLY_SINGLE_PATTERN, and prevents + // that from being necessary once there are multiple patterns.) + FUTURE_MULTI_PATTERN = 2; + } + + // A flag representing a specific style that a resource claims to conform to. + enum Style { + // The unspecified value. Do not use. + STYLE_UNSPECIFIED = 0; + + // This resource is intended to be "declarative-friendly". 
+ // + // Declarative-friendly resources must be more strictly consistent, and + // setting this to true communicates to tools that this resource should + // adhere to declarative-friendly expectations. + // + // Note: This is used by the API linter (linter.aip.dev) to enable + // additional checks. + DECLARATIVE_FRIENDLY = 1; + } + + // The resource type. It must be in the format of + // {service_name}/{resource_type_kind}. The `resource_type_kind` must be + // singular and must not include version numbers. + // + // Example: `storage.googleapis.com/Bucket` + // + // The value of the resource_type_kind must follow the regular expression + // /[A-Za-z][a-zA-Z0-9]+/. It should start with an upper case character and + // should use PascalCase (UpperCamelCase). The maximum number of + // characters allowed for the `resource_type_kind` is 100. + string type = 1; + + // Optional. The relative resource name pattern associated with this resource + // type. The DNS prefix of the full resource name shouldn't be specified here. + // + // The path pattern must follow the syntax, which aligns with HTTP binding + // syntax: + // + // Template = Segment { "/" Segment } ; + // Segment = LITERAL | Variable ; + // Variable = "{" LITERAL "}" ; + // + // Examples: + // + // - "projects/{project}/topics/{topic}" + // - "projects/{project}/knowledgeBases/{knowledge_base}" + // + // The components in braces correspond to the IDs for each resource in the + // hierarchy. It is expected that, if multiple patterns are provided, + // the same component name (e.g. "project") refers to IDs of the same + // type of resource. + repeated string pattern = 2; + + // Optional. The field on the resource that designates the resource name + // field. If omitted, this is assumed to be "name". + string name_field = 3; + + // Optional. The historical or future-looking state of the resource pattern. 
+ // + // Example: + // + // // The InspectTemplate message originally only supported resource + // // names with organization, and project was added later. + // message InspectTemplate { + // option (google.api.resource) = { + // type: "dlp.googleapis.com/InspectTemplate" + // pattern: + // "organizations/{organization}/inspectTemplates/{inspect_template}" + // pattern: "projects/{project}/inspectTemplates/{inspect_template}" + // history: ORIGINALLY_SINGLE_PATTERN + // }; + // } + History history = 4; + + // The plural name used in the resource name and permission names, such as + // 'projects' for the resource name of 'projects/{project}' and the permission + // name of 'cloudresourcemanager.googleapis.com/projects.get'. It is the same + // concept of the `plural` field in k8s CRD spec + // https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/ + // + // Note: The plural form is required even for singleton resources. See + // https://aip.dev/156 + string plural = 5; + + // The same concept of the `singular` field in k8s CRD spec + // https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/ + // Such as "project" for the `resourcemanager.googleapis.com/Project` type. + string singular = 6; + + // Style flag(s) for this resource. + // These indicate that a resource is expected to conform to a given + // style. See the specific style flags for additional information. + repeated Style style = 10; +} + +// Defines a proto annotation that describes a string field that refers to +// an API resource. +message ResourceReference { + // The resource type that the annotated field references. + // + // Example: + // + // message Subscription { + // string topic = 2 [(google.api.resource_reference) = { + // type: "pubsub.googleapis.com/Topic" + // }]; + // } + // + // Occasionally, a field may reference an arbitrary resource. 
In this case, + // APIs use the special value * in their resource reference. + // + // Example: + // + // message GetIamPolicyRequest { + // string resource = 2 [(google.api.resource_reference) = { + // type: "*" + // }]; + // } + string type = 1; + + // The resource type of a child collection that the annotated field + // references. This is useful for annotating the `parent` field that + // doesn't have a fixed resource type. + // + // Example: + // + // message ListLogEntriesRequest { + // string parent = 1 [(google.api.resource_reference) = { + // child_type: "logging.googleapis.com/LogEntry" + // }; + // } + string child_type = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/routing.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/routing.proto new file mode 100644 index 000000000000..b35289be8ea8 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/routing.proto @@ -0,0 +1,461 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "RoutingProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // See RoutingRule. + google.api.RoutingRule routing = 72295729; +} + +// Specifies the routing information that should be sent along with the request +// in the form of routing header. +// **NOTE:** All service configuration rules follow the "last one wins" order. +// +// The examples below will apply to an RPC which has the following request type: +// +// Message Definition: +// +// message Request { +// // The name of the Table +// // Values can be of the following formats: +// // - `projects//tables/` +// // - `projects//instances//tables/
` +// // - `region//zones//tables/
` +// string table_name = 1; +// +// // This value specifies routing for replication. +// // It can be in the following formats: +// // - `profiles/` +// // - a legacy `profile_id` that can be any string +// string app_profile_id = 2; +// } +// +// Example message: +// +// { +// table_name: projects/proj_foo/instances/instance_bar/table/table_baz, +// app_profile_id: profiles/prof_qux +// } +// +// The routing header consists of one or multiple key-value pairs. Every key +// and value must be percent-encoded, and joined together in the format of +// `key1=value1&key2=value2`. +// In the examples below I am skipping the percent-encoding for readablity. +// +// Example 1 +// +// Extracting a field from the request to put into the routing header +// unchanged, with the key equal to the field name. +// +// annotation: +// +// option (google.api.routing) = { +// // Take the `app_profile_id`. +// routing_parameters { +// field: "app_profile_id" +// } +// }; +// +// result: +// +// x-goog-request-params: app_profile_id=profiles/prof_qux +// +// Example 2 +// +// Extracting a field from the request to put into the routing header +// unchanged, with the key different from the field name. +// +// annotation: +// +// option (google.api.routing) = { +// // Take the `app_profile_id`, but name it `routing_id` in the header. +// routing_parameters { +// field: "app_profile_id" +// path_template: "{routing_id=**}" +// } +// }; +// +// result: +// +// x-goog-request-params: routing_id=profiles/prof_qux +// +// Example 3 +// +// Extracting a field from the request to put into the routing +// header, while matching a path template syntax on the field's value. +// +// NB: it is more useful to send nothing than to send garbage for the purpose +// of dynamic routing, since garbage pollutes cache. Thus the matching. +// +// Sub-example 3a +// +// The field matches the template. 
+// +// annotation: +// +// option (google.api.routing) = { +// // Take the `table_name`, if it's well-formed (with project-based +// // syntax). +// routing_parameters { +// field: "table_name" +// path_template: "{table_name=projects/*/instances/*/**}" +// } +// }; +// +// result: +// +// x-goog-request-params: +// table_name=projects/proj_foo/instances/instance_bar/table/table_baz +// +// Sub-example 3b +// +// The field does not match the template. +// +// annotation: +// +// option (google.api.routing) = { +// // Take the `table_name`, if it's well-formed (with region-based +// // syntax). +// routing_parameters { +// field: "table_name" +// path_template: "{table_name=regions/*/zones/*/**}" +// } +// }; +// +// result: +// +// +// +// Sub-example 3c +// +// Multiple alternative conflictingly named path templates are +// specified. The one that matches is used to construct the header. +// +// annotation: +// +// option (google.api.routing) = { +// // Take the `table_name`, if it's well-formed, whether +// // using the region- or projects-based syntax. +// +// routing_parameters { +// field: "table_name" +// path_template: "{table_name=regions/*/zones/*/**}" +// } +// routing_parameters { +// field: "table_name" +// path_template: "{table_name=projects/*/instances/*/**}" +// } +// }; +// +// result: +// +// x-goog-request-params: +// table_name=projects/proj_foo/instances/instance_bar/table/table_baz +// +// Example 4 +// +// Extracting a single routing header key-value pair by matching a +// template syntax on (a part of) a single request field. +// +// annotation: +// +// option (google.api.routing) = { +// // Take just the project id from the `table_name` field. 
+// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=projects/*}/**" +// } +// }; +// +// result: +// +// x-goog-request-params: routing_id=projects/proj_foo +// +// Example 5 +// +// Extracting a single routing header key-value pair by matching +// several conflictingly named path templates on (parts of) a single request +// field. The last template to match "wins" the conflict. +// +// annotation: +// +// option (google.api.routing) = { +// // If the `table_name` does not have instances information, +// // take just the project id for routing. +// // Otherwise take project + instance. +// +// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=projects/*}/**" +// } +// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=projects/*/instances/*}/**" +// } +// }; +// +// result: +// +// x-goog-request-params: +// routing_id=projects/proj_foo/instances/instance_bar +// +// Example 6 +// +// Extracting multiple routing header key-value pairs by matching +// several non-conflicting path templates on (parts of) a single request field. +// +// Sub-example 6a +// +// Make the templates strict, so that if the `table_name` does not +// have an instance information, nothing is sent. +// +// annotation: +// +// option (google.api.routing) = { +// // The routing code needs two keys instead of one composite +// // but works only for the tables with the "project-instance" name +// // syntax. +// +// routing_parameters { +// field: "table_name" +// path_template: "{project_id=projects/*}/instances/*/**" +// } +// routing_parameters { +// field: "table_name" +// path_template: "projects/*/{instance_id=instances/*}/**" +// } +// }; +// +// result: +// +// x-goog-request-params: +// project_id=projects/proj_foo&instance_id=instances/instance_bar +// +// Sub-example 6b +// +// Make the templates loose, so that if the `table_name` does not +// have an instance information, just the project id part is sent. 
+// +// annotation: +// +// option (google.api.routing) = { +// // The routing code wants two keys instead of one composite +// // but will work with just the `project_id` for tables without +// // an instance in the `table_name`. +// +// routing_parameters { +// field: "table_name" +// path_template: "{project_id=projects/*}/**" +// } +// routing_parameters { +// field: "table_name" +// path_template: "projects/*/{instance_id=instances/*}/**" +// } +// }; +// +// result (is the same as 6a for our example message because it has the instance +// information): +// +// x-goog-request-params: +// project_id=projects/proj_foo&instance_id=instances/instance_bar +// +// Example 7 +// +// Extracting multiple routing header key-value pairs by matching +// several path templates on multiple request fields. +// +// NB: note that here there is no way to specify sending nothing if one of the +// fields does not match its template. E.g. if the `table_name` is in the wrong +// format, the `project_id` will not be sent, but the `routing_id` will be. +// The backend routing code has to be aware of that and be prepared to not +// receive a full complement of keys if it expects multiple. +// +// annotation: +// +// option (google.api.routing) = { +// // The routing needs both `project_id` and `routing_id` +// // (from the `app_profile_id` field) for routing. +// +// routing_parameters { +// field: "table_name" +// path_template: "{project_id=projects/*}/**" +// } +// routing_parameters { +// field: "app_profile_id" +// path_template: "{routing_id=**}" +// } +// }; +// +// result: +// +// x-goog-request-params: +// project_id=projects/proj_foo&routing_id=profiles/prof_qux +// +// Example 8 +// +// Extracting a single routing header key-value pair by matching +// several conflictingly named path templates on several request fields. The +// last template to match "wins" the conflict. 
+// +// annotation: +// +// option (google.api.routing) = { +// // The `routing_id` can be a project id or a region id depending on +// // the table name format, but only if the `app_profile_id` is not set. +// // If `app_profile_id` is set it should be used instead. +// +// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=projects/*}/**" +// } +// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=regions/*}/**" +// } +// routing_parameters { +// field: "app_profile_id" +// path_template: "{routing_id=**}" +// } +// }; +// +// result: +// +// x-goog-request-params: routing_id=profiles/prof_qux +// +// Example 9 +// +// Bringing it all together. +// +// annotation: +// +// option (google.api.routing) = { +// // For routing both `table_location` and a `routing_id` are needed. +// // +// // table_location can be either an instance id or a region+zone id. +// // +// // For `routing_id`, take the value of `app_profile_id` +// // - If it's in the format `profiles/`, send +// // just the `` part. +// // - If it's any other literal, send it as is. +// // If the `app_profile_id` is empty, and the `table_name` starts with +// // the project_id, send that instead. +// +// routing_parameters { +// field: "table_name" +// path_template: "projects/*/{table_location=instances/*}/tables/*" +// } +// routing_parameters { +// field: "table_name" +// path_template: "{table_location=regions/*/zones/*}/tables/*" +// } +// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=projects/*}/**" +// } +// routing_parameters { +// field: "app_profile_id" +// path_template: "{routing_id=**}" +// } +// routing_parameters { +// field: "app_profile_id" +// path_template: "profiles/{routing_id=*}" +// } +// }; +// +// result: +// +// x-goog-request-params: +// table_location=instances/instance_bar&routing_id=prof_qux +message RoutingRule { + // A collection of Routing Parameter specifications. 
+ // **NOTE:** If multiple Routing Parameters describe the same key + // (via the `path_template` field or via the `field` field when + // `path_template` is not provided), "last one wins" rule + // determines which Parameter gets used. + // See the examples for more details. + repeated RoutingParameter routing_parameters = 2; +} + +// A projection from an input message to the GRPC or REST header. +message RoutingParameter { + // A request field to extract the header key-value pair from. + string field = 1; + + // A pattern matching the key-value field. Optional. + // If not specified, the whole field specified in the `field` field will be + // taken as value, and its name used as key. If specified, it MUST contain + // exactly one named segment (along with any number of unnamed segments) The + // pattern will be matched over the field specified in the `field` field, then + // if the match is successful: + // - the name of the single named segment will be used as a header name, + // - the match value of the segment will be used as a header value; + // if the match is NOT successful, nothing will be sent. + // + // Example: + // + // -- This is a field in the request message + // | that the header value will be extracted from. + // | + // | -- This is the key name in the + // | | routing header. + // V | + // field: "table_name" v + // path_template: "projects/*/{table_location=instances/*}/tables/*" + // ^ ^ + // | | + // In the {} brackets is the pattern that -- | + // specifies what to extract from the | + // field as a value to be sent. | + // | + // The string in the field must match the whole pattern -- + // before brackets, inside brackets, after brackets. + // + // When looking at this specific example, we can see that: + // - A key-value pair with the key `table_location` + // and the value matching `instances/*` should be added + // to the x-goog-request-params routing header. 
+ // - The value is extracted from the request message's `table_name` field + // if it matches the full pattern specified: + // `projects/*/instances/*/tables/*`. + // + // **NB:** If the `path_template` field is not provided, the key name is + // equal to the field name, and the whole field should be sent as a value. + // This makes the pattern for the field and the value functionally equivalent + // to `**`, and the configuration + // + // { + // field: "table_name" + // } + // + // is a functionally equivalent shorthand to: + // + // { + // field: "table_name" + // path_template: "{table_name=**}" + // } + // + // See Example 1 for more details. + string path_template = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/service.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/service.proto new file mode 100644 index 000000000000..3de5b6675864 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/service.proto @@ -0,0 +1,191 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api; + +import "google/api/auth.proto"; +import "google/api/backend.proto"; +import "google/api/billing.proto"; +import "google/api/client.proto"; +import "google/api/context.proto"; +import "google/api/control.proto"; +import "google/api/documentation.proto"; +import "google/api/endpoint.proto"; +import "google/api/http.proto"; +import "google/api/log.proto"; +import "google/api/logging.proto"; +import "google/api/metric.proto"; +import "google/api/monitored_resource.proto"; +import "google/api/monitoring.proto"; +import "google/api/quota.proto"; +import "google/api/source_info.proto"; +import "google/api/system_parameter.proto"; +import "google/api/usage.proto"; +import "google/protobuf/api.proto"; +import "google/protobuf/type.proto"; +import "google/protobuf/wrappers.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "ServiceProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Service` is the root object of Google API service configuration (service +// config). It describes the basic information about a logical service, +// such as the service name and the user-facing title, and delegates other +// aspects to sub-sections. Each sub-section is either a proto message or a +// repeated proto message that configures a specific aspect, such as auth. +// For more information, see each proto message definition. 
+// +// Example: +// +// type: google.api.Service +// name: calendar.googleapis.com +// title: Google Calendar API +// apis: +// - name: google.calendar.v3.Calendar +// +// visibility: +// rules: +// - selector: "google.calendar.v3.*" +// restriction: PREVIEW +// backend: +// rules: +// - selector: "google.calendar.v3.*" +// address: calendar.example.com +// +// authentication: +// providers: +// - id: google_calendar_auth +// jwks_uri: https://www.googleapis.com/oauth2/v1/certs +// issuer: https://securetoken.google.com +// rules: +// - selector: "*" +// requirements: +// provider_id: google_calendar_auth +message Service { + // The service name, which is a DNS-like logical identifier for the + // service, such as `calendar.googleapis.com`. The service name + // typically goes through DNS verification to make sure the owner + // of the service also owns the DNS name. + string name = 1; + + // The product title for this service, it is the name displayed in Google + // Cloud Console. + string title = 2; + + // The Google project that owns this service. + string producer_project_id = 22; + + // A unique ID for a specific instance of this message, typically assigned + // by the client for tracking purpose. Must be no longer than 63 characters + // and only lower case letters, digits, '.', '_' and '-' are allowed. If + // empty, the server may choose to generate one instead. + string id = 33; + + // A list of API interfaces exported by this service. Only the `name` field + // of the [google.protobuf.Api][google.protobuf.Api] needs to be provided by + // the configuration author, as the remaining fields will be derived from the + // IDL during the normalization process. It is an error to specify an API + // interface here which cannot be resolved against the associated IDL files. + repeated google.protobuf.Api apis = 3; + + // A list of all proto message types included in this API service. 
+ // Types referenced directly or indirectly by the `apis` are automatically + // included. Messages which are not referenced but shall be included, such as + // types used by the `google.protobuf.Any` type, should be listed here by + // name by the configuration author. Example: + // + // types: + // - name: google.protobuf.Int32 + repeated google.protobuf.Type types = 4; + + // A list of all enum types included in this API service. Enums referenced + // directly or indirectly by the `apis` are automatically included. Enums + // which are not referenced but shall be included should be listed here by + // name by the configuration author. Example: + // + // enums: + // - name: google.someapi.v1.SomeEnum + repeated google.protobuf.Enum enums = 5; + + // Additional API documentation. + Documentation documentation = 6; + + // API backend configuration. + Backend backend = 8; + + // HTTP configuration. + Http http = 9; + + // Quota configuration. + Quota quota = 10; + + // Auth configuration. + Authentication authentication = 11; + + // Context configuration. + Context context = 12; + + // Configuration controlling usage of this service. + Usage usage = 15; + + // Configuration for network endpoints. If this is empty, then an endpoint + // with the same name as the service is automatically generated to service all + // defined APIs. + repeated Endpoint endpoints = 18; + + // Configuration for the service control plane. + Control control = 21; + + // Defines the logs used by this service. + repeated LogDescriptor logs = 23; + + // Defines the metrics used by this service. + repeated MetricDescriptor metrics = 24; + + // Defines the monitored resources used by this service. This is required + // by the [Service.monitoring][google.api.Service.monitoring] and + // [Service.logging][google.api.Service.logging] configurations. + repeated MonitoredResourceDescriptor monitored_resources = 25; + + // Billing configuration. + Billing billing = 26; + + // Logging configuration. 
+ Logging logging = 27; + + // Monitoring configuration. + Monitoring monitoring = 28; + + // System parameter configuration. + SystemParameters system_parameters = 29; + + // Output only. The source information for this configuration if available. + SourceInfo source_info = 37; + + // Settings for [Google Cloud Client + // libraries](https://cloud.google.com/apis/docs/cloud-client-libraries) + // generated from APIs defined as protocol buffers. + Publishing publishing = 45; + + // Obsolete. Do not use. + // + // This field has no semantic meaning. The service config compiler always + // sets this field to `3`. + google.protobuf.UInt32Value config_version = 20; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/source_info.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/source_info.proto new file mode 100644 index 000000000000..51fe27901f54 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/source_info.proto @@ -0,0 +1,31 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api; + +import "google/protobuf/any.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "SourceInfoProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Source information used to create a Service Config +message SourceInfo { + // All files used during config generation. + repeated google.protobuf.Any source_files = 1; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/system_parameter.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/system_parameter.proto new file mode 100644 index 000000000000..8d29057f7704 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/system_parameter.proto @@ -0,0 +1,96 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "SystemParameterProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// ### System parameter configuration +// +// A system parameter is a special kind of parameter defined by the API +// system, not by an individual API. It is typically mapped to an HTTP header +// and/or a URL query parameter. This configuration specifies which methods +// change the names of the system parameters. +message SystemParameters { + // Define system parameters. + // + // The parameters defined here will override the default parameters + // implemented by the system. If this field is missing from the service + // config, default system parameters will be used. Default system parameters + // and names is implementation-dependent. + // + // Example: define api key for all methods + // + // system_parameters + // rules: + // - selector: "*" + // parameters: + // - name: api_key + // url_query_parameter: api_key + // + // + // Example: define 2 api key names for a specific method. + // + // system_parameters + // rules: + // - selector: "/ListShelves" + // parameters: + // - name: api_key + // http_header: Api-Key1 + // - name: api_key + // http_header: Api-Key2 + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated SystemParameterRule rules = 1; +} + +// Define a system parameter rule mapping system parameter definitions to +// methods. +message SystemParameterRule { + // Selects the methods to which this rule applies. Use '*' to indicate all + // methods in all APIs. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + string selector = 1; + + // Define parameters. Multiple names may be defined for a parameter. 
+ // For a given method call, only one of them should be used. If multiple + // names are used the behavior is implementation-dependent. + // If none of the specified names are present the behavior is + // parameter-dependent. + repeated SystemParameter parameters = 2; +} + +// Define a parameter's name and location. The parameter may be passed as either +// an HTTP header or a URL query parameter, and if both are passed the behavior +// is implementation-dependent. +message SystemParameter { + // Define the name of the parameter, such as "api_key" . It is case sensitive. + string name = 1; + + // Define the HTTP header name to use for the parameter. It is case + // insensitive. + string http_header = 2; + + // Define the URL query parameter name to use for the parameter. It is case + // sensitive. + string url_query_parameter = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/usage.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/usage.proto new file mode 100644 index 000000000000..b9384b44aedf --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/usage.proto @@ -0,0 +1,96 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "UsageProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Configuration controlling usage of a service. +message Usage { + // Requirements that must be satisfied before a consumer project can use the + // service. Each requirement is of the form /; + // for example 'serviceusage.googleapis.com/billing-enabled'. + // + // For Google APIs, a Terms of Service requirement must be included here. + // Google Cloud APIs must include "serviceusage.googleapis.com/tos/cloud". + // Other Google APIs should include + // "serviceusage.googleapis.com/tos/universal". Additional ToS can be + // included based on the business needs. + repeated string requirements = 1; + + // A list of usage rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated UsageRule rules = 6; + + // The full resource name of a channel used for sending notifications to the + // service producer. + // + // Google Service Management currently only supports + // [Google Cloud Pub/Sub](https://cloud.google.com/pubsub) as a notification + // channel. To use Google Cloud Pub/Sub as the channel, this must be the name + // of a Cloud Pub/Sub topic that uses the Cloud Pub/Sub topic name format + // documented in https://cloud.google.com/pubsub/docs/overview. + string producer_notification_channel = 7; +} + +// Usage configuration rules for the service. +// +// NOTE: Under development. +// +// +// Use this rule to configure unregistered calls for the service. Unregistered +// calls are calls that do not contain consumer project identity. +// (Example: calls that do not contain an API key). 
+// By default, API methods do not allow unregistered calls, and each method call +// must be identified by a consumer project identity. Use this rule to +// allow/disallow unregistered calls. +// +// Example of an API that wants to allow unregistered calls for entire service. +// +// usage: +// rules: +// - selector: "*" +// allow_unregistered_calls: true +// +// Example of a method that wants to allow unregistered calls. +// +// usage: +// rules: +// - selector: "google.example.library.v1.LibraryService.CreateBook" +// allow_unregistered_calls: true +message UsageRule { + // Selects the methods to which this rule applies. Use '*' to indicate all + // methods in all APIs. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + string selector = 1; + + // If true, the selected method allows unregistered calls, e.g. calls + // that don't identify any user or application. + bool allow_unregistered_calls = 2; + + // If true, the selected method should skip service control and the control + // plane features, such as quota and billing, will not be available. + // This flag is used by Google Cloud Endpoints to bypass checks for internal + // methods, such as service health check methods. + bool skip_service_control = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/visibility.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/visibility.proto new file mode 100644 index 000000000000..8b1f946fd16f --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/api/visibility.proto @@ -0,0 +1,113 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/visibility;visibility"; +option java_multiple_files = true; +option java_outer_classname = "VisibilityProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.EnumOptions { + // See `VisibilityRule`. + google.api.VisibilityRule enum_visibility = 72295727; +} + +extend google.protobuf.EnumValueOptions { + // See `VisibilityRule`. + google.api.VisibilityRule value_visibility = 72295727; +} + +extend google.protobuf.FieldOptions { + // See `VisibilityRule`. + google.api.VisibilityRule field_visibility = 72295727; +} + +extend google.protobuf.MessageOptions { + // See `VisibilityRule`. + google.api.VisibilityRule message_visibility = 72295727; +} + +extend google.protobuf.MethodOptions { + // See `VisibilityRule`. + google.api.VisibilityRule method_visibility = 72295727; +} + +extend google.protobuf.ServiceOptions { + // See `VisibilityRule`. + google.api.VisibilityRule api_visibility = 72295727; +} + +// `Visibility` restricts service consumer's access to service elements, +// such as whether an application can call a visibility-restricted method. +// The restriction is expressed by applying visibility labels on service +// elements. The visibility labels are elsewhere linked to service consumers. 
+// +// A service can define multiple visibility labels, but a service consumer +// should be granted at most one visibility label. Multiple visibility +// labels for a single service consumer are not supported. +// +// If an element and all its parents have no visibility label, its visibility +// is unconditionally granted. +// +// Example: +// +// visibility: +// rules: +// - selector: google.calendar.Calendar.EnhancedSearch +// restriction: PREVIEW +// - selector: google.calendar.Calendar.Delegate +// restriction: INTERNAL +// +// Here, all methods are publicly visible except for the restricted methods +// EnhancedSearch and Delegate. +message Visibility { + // A list of visibility rules that apply to individual API elements. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated VisibilityRule rules = 1; +} + +// A visibility rule provides visibility configuration for an individual API +// element. +message VisibilityRule { + // Selects methods, messages, fields, enums, etc. to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + string selector = 1; + + // A comma-separated list of visibility labels that apply to the `selector`. + // Any of the listed labels can be used to grant the visibility. + // + // If a rule has multiple labels, removing one of the labels but not all of + // them can break clients. + // + // Example: + // + // visibility: + // rules: + // - selector: google.calendar.Calendar.EnhancedSearch + // restriction: INTERNAL, PREVIEW + // + // Removing INTERNAL from this restriction will break clients that rely on + // this method and only had access to it through INTERNAL. 
+ string restriction = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/apps/card/v1/card.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/apps/card/v1/card.proto new file mode 100644 index 000000000000..538a785b3706 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/apps/card/v1/card.proto @@ -0,0 +1,1918 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.apps.card.v1; + +import "google/type/color.proto"; + +option csharp_namespace = "Google.Apps.Card.V1"; +option go_package = "google.golang.org/genproto/googleapis/apps/card/v1;card"; +option java_multiple_files = true; +option java_outer_classname = "CardProto"; +option java_package = "com.google.apps.card.v1"; +option php_namespace = "Google\\Apps\\Card\\V1"; +option ruby_package = "Google::Apps::Card::V1"; + +// A card interface displayed in a Google Chat message or Google Workspace +// Add-on. +// +// Cards support a defined layout, interactive UI elements like buttons, and +// rich media like images. Use cards to present detailed information, +// gather information from users, and guide users to take a next step. 
+// +// [Card builder](https://addons.gsuite.google.com/uikit/builder) +// +// To learn how +// to build cards, see the following documentation: +// +// * For Google Chat apps, see [Design dynamic, interactive, and consistent UIs +// with cards](https://developers.google.com/chat/ui). +// * For Google Workspace Add-ons, see [Card-based +// interfaces](https://developers.google.com/apps-script/add-ons/concepts/cards). +// +// **Example: Card message for a Google Chat app** +// +// ![Example contact +// card](https://developers.google.com/chat/images/card_api_reference.png) +// +// To create the sample card message in Google Chat, use the following JSON: +// +// ``` +// { +// "cardsV2": [ +// { +// "cardId": "unique-card-id", +// "card": { +// "header": { +// "title": "Sasha", +// "subtitle": "Software Engineer", +// "imageUrl": +// "https://developers.google.com/chat/images/quickstart-app-avatar.png", +// "imageType": "CIRCLE", +// "imageAltText": "Avatar for Sasha", +// }, +// "sections": [ +// { +// "header": "Contact Info", +// "collapsible": true, +// "uncollapsibleWidgetsCount": 1, +// "widgets": [ +// { +// "decoratedText": { +// "startIcon": { +// "knownIcon": "EMAIL", +// }, +// "text": "sasha@example.com", +// } +// }, +// { +// "decoratedText": { +// "startIcon": { +// "knownIcon": "PERSON", +// }, +// "text": "Online", +// }, +// }, +// { +// "decoratedText": { +// "startIcon": { +// "knownIcon": "PHONE", +// }, +// "text": "+1 (555) 555-1234", +// } +// }, +// { +// "buttonList": { +// "buttons": [ +// { +// "text": "Share", +// "onClick": { +// "openLink": { +// "url": "https://example.com/share", +// } +// } +// }, +// { +// "text": "Edit", +// "onClick": { +// "action": { +// "function": "goToView", +// "parameters": [ +// { +// "key": "viewType", +// "value": "EDIT", +// } +// ], +// } +// } +// }, +// ], +// } +// }, +// ], +// }, +// ], +// }, +// } +// ], +// } +// ``` +message Card { + // Represents a card header. 
For an example in Google Chat apps, see [Card + // header](https://developers.google.com/chat/ui/widgets/card-header). + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + message CardHeader { + // Required. The title of the card header. + // The header has a fixed height: if both a + // title and subtitle are specified, each takes up one line. If only the + // title is specified, it takes up both lines. + string title = 1; + + // The subtitle of the card header. If specified, appears on its own line + // below the `title`. + string subtitle = 2; + + // The shape used to crop the image. + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + Widget.ImageType image_type = 3; + + // The HTTPS URL of the image in the card header. + string image_url = 4; + + // The alternative text of this image that's used for accessibility. + string image_alt_text = 5; + } + + // A section contains a collection of widgets that are rendered + // vertically in the order that they're specified. + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + message Section { + // Text that appears at the top of a section. + // Supports simple HTML formatted text. For more information + // about formatting text, see + // [Formatting text in Google Chat + // apps](https://developers.google.com/chat/format-messages#card-formatting) + // and + // [Formatting + // text in Google Workspace + // Add-ons](https://developers.google.com/apps-script/add-ons/concepts/widgets#text_formatting). + string header = 1; + + // All the widgets in the section. + // Must contain at least one widget. + repeated Widget widgets = 2; + + // Indicates whether this section is collapsible. + // + // Collapsible sections hide some or all widgets, but users can expand the + // section to reveal the hidden widgets by clicking **Show more**. 
Users + // can hide the widgets again by clicking **Show less**. + // + // To determine which widgets are hidden, specify + // `uncollapsibleWidgetsCount`. + bool collapsible = 5; + + // The number of uncollapsible widgets which remain visible even when a + // section is collapsed. + // + // For example, when a section + // contains five widgets and the `uncollapsibleWidgetsCount` is set to `2`, + // the first two widgets are always shown and the last three are collapsed + // by default. The `uncollapsibleWidgetsCount` is taken into account only + // when `collapsible` is `true`. + int32 uncollapsible_widgets_count = 6; + } + + // The divider style of a card. Currently only used for dividers betweens card + // sections. + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + enum DividerStyle { + // Don't use. Unspecified. + DIVIDER_STYLE_UNSPECIFIED = 0; + + // Default option. Render a solid divider between sections. + SOLID_DIVIDER = 1; + + // If set, no divider is rendered between sections. + NO_DIVIDER = 2; + } + + // A card action is the action associated with the card. For example, + // an invoice card might include actions such as delete invoice, email + // invoice, or open the invoice in a browser. + // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + message CardAction { + // The label that displays as the action menu item. + string action_label = 1; + + // The `onClick` action for this action item. + OnClick on_click = 2; + } + + // A persistent (sticky) footer that that appears at the bottom of the card. + // + // Setting `fixedFooter` without specifying a `primaryButton` or a + // `secondaryButton` causes an error. + // + // For Chat apps, you can use fixed footers in + // [dialogs](https://developers.google.com/chat/how-tos/dialogs), but not + // [card + // messages](https://developers.google.com/chat/api/guides/v1/messages/create#create). 
+ // For an example in Google Chat apps, see [Card + // footer](https://developers.google.com/chat/ui/widgets/card-fixed-footer). + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + message CardFixedFooter { + // The primary button of the fixed footer. The button must be a text button + // with text and color set. + Button primary_button = 1; + + // The secondary button of the fixed footer. The button must be a text + // button with text and color set. + // If `secondaryButton` is set, you must also set `primaryButton`. + Button secondary_button = 2; + } + + // In Google Workspace Add-ons, + // determines how a card is displayed. + // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + enum DisplayStyle { + // Don't use. Unspecified. + DISPLAY_STYLE_UNSPECIFIED = 0; + + // The header of the card appears at the bottom of the + // sidebar, partially covering the current top card of the stack. Clicking + // the header pops the card into the card stack. If the card has no header, + // a generated header is used instead. + PEEK = 1; + + // Default value. The card is shown by replacing the view of the top card in + // the card stack. + REPLACE = 2; + } + + // The header of the card. A header usually contains a leading image and a + // title. Headers always appear at the top of a card. + CardHeader header = 1; + + // Contains a collection of widgets. Each section has its own, optional + // header. Sections are visually separated by a line divider. For an example + // in Google Chat apps, see [Card + // section](https://developers.google.com/chat/ui/widgets/card-section). + repeated Section sections = 2; + + // The divider style between sections. + DividerStyle section_divider_style = 9; + + // The card's actions. Actions are added to the card's toolbar menu. 
+ // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + // + // For example, the following JSON constructs a card action menu with + // `Settings` and `Send Feedback` options: + // + // ``` + // "card_actions": [ + // { + // "actionLabel": "Settings", + // "onClick": { + // "action": { + // "functionName": "goToView", + // "parameters": [ + // { + // "key": "viewType", + // "value": "SETTING" + // } + // ], + // "loadIndicator": "LoadIndicator.SPINNER" + // } + // } + // }, + // { + // "actionLabel": "Send Feedback", + // "onClick": { + // "openLink": { + // "url": "https://example.com/feedback" + // } + // } + // } + // ] + // ``` + repeated CardAction card_actions = 3; + + // Name of the card. Used as a card identifier in card navigation. + // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + string name = 4; + + // The fixed footer shown at the bottom of this card. + // + // Setting `fixedFooter` without specifying a `primaryButton` or a + // `secondaryButton` causes an error. For Chat apps, you can use fixed footers + // in + // [dialogs](https://developers.google.com/chat/how-tos/dialogs), but not + // [card + // messages](https://developers.google.com/chat/api/guides/v1/messages/create#create). + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + CardFixedFooter fixed_footer = 5; + + // In Google Workspace Add-ons, sets the display properties of the + // `peekCardHeader`. + // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + DisplayStyle display_style = 6; + + // When displaying contextual content, the peek card header acts as a + // placeholder so that the user can navigate forward between the homepage + // cards and the contextual cards. 
+ // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + CardHeader peek_card_header = 7; +} + +// Each card is made up of widgets. +// +// A widget is a composite object that can represent one of text, images, +// buttons, and other object types. +message Widget { + // The shape used to crop the image. + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + enum ImageType { + // Default value. Applies a square mask to the image. For example, a 4x3 + // image becomes 3x3. + SQUARE = 0; + + // Applies a circular mask to the image. For example, a 4x3 image becomes a + // circle with a diameter of 3. + CIRCLE = 1; + } + + // Specifies whether widgets align to the left, right, or center of a column. + // + // [Google Chat apps](https://developers.google.com/chat): + enum HorizontalAlignment { + // Don't use. Unspecified. + HORIZONTAL_ALIGNMENT_UNSPECIFIED = 0; + + // Default value. Aligns widgets to the start position of the column. For + // left-to-right layouts, aligns to the left. For right-to-left layouts, + // aligns to the right. + START = 1; + + // Aligns widgets to the center of the column. + CENTER = 2; + + // Aligns widgets to the end position of the column. For left-to-right + // layouts, aligns widgets to the right. For right-to-left layouts, aligns + // widgets to the left. + END = 3; + } + + // A widget can only have one of the following items. You can use multiple + // widget fields to display more items. + oneof data { + // Displays a text paragraph. Supports simple HTML formatted text. For more + // information about formatting text, see + // [Formatting text in Google Chat + // apps](https://developers.google.com/chat/format-messages#card-formatting) + // and + // [Formatting + // text in Google Workspace + // Add-ons](https://developers.google.com/apps-script/add-ons/concepts/widgets#text_formatting). 
+ // + // For example, the following JSON creates a bolded text: + // ``` + // "textParagraph": { + // "text": " bold text" + // } + // ``` + TextParagraph text_paragraph = 1; + + // Displays an image. + // + // For example, the following JSON creates an image with alternative text: + // ``` + // "image": { + // "imageUrl": + // "https://developers.google.com/chat/images/quickstart-app-avatar.png", + // "altText": "Chat app avatar" + // } + // ``` + Image image = 2; + + // Displays a decorated text item. + // + // For example, the following JSON creates a decorated text widget showing + // email address: + // + // ``` + // "decoratedText": { + // "icon": { + // "knownIcon": "EMAIL" + // }, + // "topLabel": "Email Address", + // "text": "sasha@example.com", + // "bottomLabel": "This is a new Email address!", + // "switchControl": { + // "name": "has_send_welcome_email_to_sasha", + // "selected": false, + // "controlType": "CHECKBOX" + // } + // } + // ``` + DecoratedText decorated_text = 3; + + // A list of buttons. + // + // For example, the following JSON creates two buttons. The first + // is a blue text button and the second is an image button that opens a + // link: + // ``` + // "buttonList": { + // "buttons": [ + // { + // "text": "Edit", + // "color": { + // "red": 0, + // "green": 0, + // "blue": 1, + // "alpha": 1 + // }, + // "disabled": true, + // }, + // { + // "icon": { + // "knownIcon": "INVITE", + // "altText": "check calendar" + // }, + // "onClick": { + // "openLink": { + // "url": "https://example.com/calendar" + // } + // } + // } + // ] + // } + // ``` + ButtonList button_list = 4; + + // Displays a text box that users can type into. 
+    //
+    // For example, the following JSON creates a text input for an email
+    // address:
+    //
+    // ```
+    // "textInput": {
+    //   "name": "mailing_address",
+    //   "label": "Mailing Address"
+    // }
+    // ```
+    //
+    // As another example, the following JSON creates a text input for a
+    // programming language with static suggestions:
+    // ```
+    // "textInput": {
+    //   "name": "preferred_programming_language",
+    //   "label": "Preferred Language",
+    //   "initialSuggestions": {
+    //     "items": [
+    //       {
+    //         "text": "C++"
+    //       },
+    //       {
+    //         "text": "Java"
+    //       },
+    //       {
+    //         "text": "JavaScript"
+    //       },
+    //       {
+    //         "text": "Python"
+    //       }
+    //     ]
+    //   }
+    // }
+    // ```
+    TextInput text_input = 5;
+
+    // Displays a selection control that lets users select items. Selection
+    // controls can be checkboxes, radio buttons, switches, or dropdown menus.
+    //
+    // For example, the following JSON creates a dropdown menu that lets users
+    // choose a size:
+    //
+    // ```
+    // "selectionInput": {
+    //   "name": "size",
+    //   "label": "Size",
+    //   "type": "DROPDOWN",
+    //   "items": [
+    //     {
+    //       "text": "S",
+    //       "value": "small",
+    //       "selected": false
+    //     },
+    //     {
+    //       "text": "M",
+    //       "value": "medium",
+    //       "selected": true
+    //     },
+    //     {
+    //       "text": "L",
+    //       "value": "large",
+    //       "selected": false
+    //     },
+    //     {
+    //       "text": "XL",
+    //       "value": "extra_large",
+    //       "selected": false
+    //     }
+    //   ]
+    // }
+    // ```
+    SelectionInput selection_input = 6;
+
+    // Displays a widget that lets users input a date, time, or date and time.
+    //
+    // For example, the following JSON creates a date time picker to schedule an
+    // appointment:
+    //
+    //
+    // ```
+    // "dateTimePicker": {
+    //   "name": "appointment_time",
+    //   "label": "Book your appointment at:",
+    //   "type": "DATE_AND_TIME",
+    //   "valueMsEpoch": "796435200000"
+    // }
+    // ```
+    DateTimePicker date_time_picker = 7;
+
+    // Displays a horizontal line divider between widgets.
+ // + // For example, the following JSON creates a divider: + // ``` + // "divider": { + // } + // ``` + Divider divider = 9; + + // Displays a grid with a collection of items. + // + // A grid supports any number of columns and items. The number of rows is + // determined by the upper bounds of the number items divided by the number + // of columns. A grid with 10 items and 2 columns has 5 rows. A grid with 11 + // items and 2 columns has 6 rows. + // + // [Google Workspace Add-ons and + // Chat apps](https://developers.google.com/workspace/extend): + // + // For example, the following JSON creates a 2 column grid with a single + // item: + // + // ``` + // "grid": { + // "title": "A fine collection of items", + // "columnCount": 2, + // "borderStyle": { + // "type": "STROKE", + // "cornerRadius": 4 + // }, + // "items": [ + // { + // "image": { + // "imageUri": "https://www.example.com/image.png", + // "cropStyle": { + // "type": "SQUARE" + // }, + // "borderStyle": { + // "type": "STROKE" + // } + // }, + // "title": "An item", + // "textAlignment": "CENTER" + // } + // ], + // "onClick": { + // "openLink": { + // "url": "https://www.example.com" + // } + // } + // } + // ``` + Grid grid = 10; + + // Displays up to 2 columns. + // + // To include more than 2 columns, or to use rows, use the `Grid` widget. 
+ // + // For example, the following JSON creates 2 columns that each contain + // text paragraphs: + // + // ``` + // "columns": { + // "columnItems": [ + // { + // "horizontalSizeStyle": "FILL_AVAILABLE_SPACE", + // "horizontalAlignment": "CENTER", + // "verticalAlignment": "CENTER", + // "widgets": [ + // { + // "textParagraph": { + // "text": "First column text paragraph" + // } + // } + // ] + // }, + // { + // "horizontalSizeStyle": "FILL_AVAILABLE_SPACE", + // "horizontalAlignment": "CENTER", + // "verticalAlignment": "CENTER", + // "widgets": [ + // { + // "textParagraph": { + // "text": "Second column text paragraph" + // } + // } + // ] + // } + // ] + // } + // ``` + Columns columns = 11; + } + + // Specifies whether widgets align to the left, right, or center of a column. + HorizontalAlignment horizontal_alignment = 8; +} + +// A paragraph of text that supports formatting. For an example in +// Google Chat apps, see [Text +// paragraph](https://developers.google.com/chat/ui/widgets/text-paragraph). +// For more information +// about formatting text, see +// [Formatting text in Google Chat +// apps](https://developers.google.com/chat/format-messages#card-formatting) +// and +// [Formatting +// text in Google Workspace +// Add-ons](https://developers.google.com/apps-script/add-ons/concepts/widgets#text_formatting). +// +// [Google Workspace Add-ons and +// Chat apps](https://developers.google.com/workspace/extend): +message TextParagraph { + // The text that's shown in the widget. + string text = 1; +} + +// An image that is specified by a URL and can have an `onClick` action. For an +// example, see [Image](https://developers.google.com/chat/ui/widgets/image). +// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +message Image { + // The HTTPS URL that hosts the image. 
+ // + // For example: + // + // ``` + // https://developers.google.com/chat/images/quickstart-app-avatar.png + // ``` + string image_url = 1; + + // When a user clicks the image, the click triggers this action. + OnClick on_click = 2; + + // The alternative text of this image that's used for accessibility. + string alt_text = 3; +} + +// Displays a divider between widgets as a horizontal line. For an example in +// Google Chat apps, see +// [Divider](https://developers.google.com/chat/ui/widgets/divider). +// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +// +// For example, the following JSON creates a divider: +// +// ``` +// "divider": {} +// ``` +message Divider {} + +// A widget that displays text with optional decorations such as a label above +// or below the text, an icon in front of the text, a selection widget, or a +// button after the text. For an example in +// Google Chat apps, see [Decorated +// text](https://developers.google.com/chat/ui/widgets/decorated-text). +// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +message DecoratedText { + // Either a toggle-style switch or a checkbox inside a `decoratedText` widget. + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + // + // Only supported in the `decoratedText` widget. + message SwitchControl { + // How the switch appears in the user interface. + // + // [Google Workspace Add-ons + // and Chat apps](https://developers.google.com/workspace/extend): + enum ControlType { + // A toggle-style switch. + SWITCH = 0; + + // Deprecated in favor of `CHECK_BOX`. + CHECKBOX = 1; + + // A checkbox. + CHECK_BOX = 2; + } + + // The name by which the switch widget is identified in a form input event. + // + // For details about working with form inputs, see [Receive form + // data](https://developers.google.com/chat/ui/read-form-data). 
+ string name = 1; + + // The value entered by a user, returned as part of a form input event. + // + // For details about working with form inputs, see [Receive form + // data](https://developers.google.com/chat/ui/read-form-data). + string value = 2; + + // When `true`, the switch is selected. + bool selected = 3; + + // The action to perform when the switch state is changed, such as what + // function to run. + Action on_change_action = 4; + + // How the switch appears in the user interface. + // + // [Google Workspace Add-ons + // and Chat apps](https://developers.google.com/workspace/extend): + ControlType control_type = 5; + } + + // Deprecated in favor of `startIcon`. + Icon icon = 1 [deprecated = true]; + + // The icon displayed in front of the text. + Icon start_icon = 12; + + // The text that appears above `text`. Always truncates. + string top_label = 3; + + // Required. The primary text. + // + // Supports simple formatting. For more information + // about formatting text, see + // [Formatting text in Google Chat + // apps](https://developers.google.com/chat/format-messages#card-formatting) + // and + // [Formatting + // text in Google Workspace + // Add-ons](https://developers.google.com/apps-script/add-ons/concepts/widgets#text_formatting). + string text = 4; + + // The wrap text setting. If `true`, the text wraps and displays on + // multiple lines. Otherwise, the text is truncated. + // + // Only applies to `text`, not `topLabel` and `bottomLabel`. + bool wrap_text = 5; + + // The text that appears below `text`. Always wraps. + string bottom_label = 6; + + // This action is triggered when users click `topLabel` or `bottomLabel`. + OnClick on_click = 7; + + // A button, switch, checkbox, or image that appears to the right-hand side + // of text in the `decoratedText` widget. + oneof control { + // A button that a user can click to trigger an action. 
+ Button button = 8; + + // A switch widget that a user can click to change its state and trigger an + // action. + SwitchControl switch_control = 9; + + // An icon displayed after the text. + // + // Supports + // [built-in](https://developers.google.com/chat/format-messages#builtinicons) + // and + // [custom](https://developers.google.com/chat/format-messages#customicons) + // icons. + Icon end_icon = 11; + } +} + +// A field in which users can enter text. Supports suggestions and on-change +// actions. For an example in Google Chat apps, see [Text +// input](https://developers.google.com/chat/ui/widgets/text-input). +// +// Chat apps receive and can process the value of entered text during form input +// events. For details about working with form inputs, see [Receive form +// data](https://developers.google.com/chat/ui/read-form-data). +// +// When you need to collect undefined or abstract data from users, +// use a text input. To collect defined or enumerated data from users, use the +// [SelectionInput][google.apps.card.v1.SelectionInput] widget. +// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +message TextInput { + // How a text input field appears in the user interface. For example, + // whether it's a single line input field, or a multi-line input. If + // `initialSuggestions` is specified, `type` is always `SINGLE_LINE`, + // even if it's set to `MULTIPLE_LINE`. + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + enum Type { + // The text input field has a fixed height of one line. + SINGLE_LINE = 0; + + // The text input field has a fixed height of multiple lines. + MULTIPLE_LINE = 1; + } + + // The name by which the text input is identified in a form input event. + // + // For details about working with form inputs, see [Receive form + // data](https://developers.google.com/chat/ui/read-form-data). 
+ string name = 1; + + // The text that appears above the text input field in the user interface. + // + // Specify text that helps the user enter the information your app needs. + // For example, if you are asking someone's name, but specifically need their + // surname, write `surname` instead of `name`. + // + // Required if `hintText` is unspecified. Otherwise, optional. + string label = 2; + + // Text that appears below the text input field meant to assist users by + // prompting them to enter a certain value. This text is always visible. + // + // Required if `label` is unspecified. Otherwise, optional. + string hint_text = 3; + + // The value entered by a user, returned as part of a form input event. + // + // For details about working with form inputs, see [Receive form + // data](https://developers.google.com/chat/ui/read-form-data). + string value = 4; + + // How a text input field appears in the user interface. + // For example, whether the field is single or multi-line. + Type type = 5; + + // What to do when a change occurs in the text input field. For example, a + // user adding to the field or deleting text. + // + // Examples of actions to take include running a custom function or opening + // a [dialog](https://developers.google.com/chat/how-tos/dialogs) + // in Google Chat. + Action on_change_action = 6; + + // Suggested values that users can enter. These values appear when users click + // inside the text input field. As users type, the suggested values + // dynamically filter to match what the users have typed. + // + // For example, a text input field for programming language might suggest + // Java, JavaScript, Python, and C++. When users start typing `Jav`, the list + // of suggestions filters to show just `Java` and `JavaScript`. + // + // Suggested values help guide users to enter values that your app can make + // sense of. When referring to JavaScript, some users might enter `javascript` + // and others `java script`. 
Suggesting `JavaScript` can standardize how users + // interact with your app. + // + // When specified, `TextInput.type` is always `SINGLE_LINE`, even if it's set + // to `MULTIPLE_LINE`. + // + // [Google Workspace + // Add-ons and Chat apps](https://developers.google.com/workspace/extend): + Suggestions initial_suggestions = 7; + + // Optional. Specify what action to take when the text input field provides + // suggestions to users who interact with it. + // + // If unspecified, the suggestions are set by `initialSuggestions` and + // are processed by the client. + // + // If specified, the app takes the action specified here, such as running + // a custom function. + // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + Action auto_complete_action = 8; + + // Text that appears in the text input field when the field is empty. + // Use this text to prompt users to enter a value. For example, `Enter a + // number from 0 to 100`. + // + // [Google Chat apps](https://developers.google.com/chat): + string placeholder_text = 12; +} + +// Suggested values that users can enter. These values appear when users click +// inside the text input field. As users type, the suggested values +// dynamically filter to match what the users have typed. +// +// For example, a text input field for programming language might suggest +// Java, JavaScript, Python, and C++. When users start typing `Jav`, the list +// of suggestions filters to show `Java` and `JavaScript`. +// +// Suggested values help guide users to enter values that your app can make +// sense of. When referring to JavaScript, some users might enter `javascript` +// and others `java script`. Suggesting `JavaScript` can standardize how users +// interact with your app. +// +// When specified, `TextInput.type` is always `SINGLE_LINE`, even if it's set +// to `MULTIPLE_LINE`. 
+//
+// [Google Workspace
+// Add-ons and Chat apps](https://developers.google.com/workspace/extend):
+message Suggestions {
+  // One suggested value that users can enter in a text input field.
+  //
+  // [Google Workspace Add-ons and Chat
+  // apps](https://developers.google.com/workspace/extend):
+  message SuggestionItem {
+    oneof content {
+      // The value of a suggested input to a text input field. This is
+      // equivalent to what users enter themselves.
+      string text = 1;
+    }
+  }
+
+  // A list of suggestions used for autocomplete recommendations in text input
+  // fields.
+  repeated SuggestionItem items = 1;
+}
+
+// A list of buttons laid out horizontally. For an example in
+// Google Chat apps, see
+// [Button list](https://developers.google.com/chat/ui/widgets/button-list).
+//
+// [Google Workspace Add-ons and Chat
+// apps](https://developers.google.com/workspace/extend):
+message ButtonList {
+  // An array of buttons.
+  repeated Button buttons = 1;
+}
+
+// A widget that creates one or more UI items that users can select.
+// For example, a dropdown menu or checkboxes. You can use this widget to
+// collect data that can be predicted or enumerated. For an example in Google
+// Chat apps, see [Selection
+// input](https://developers.google.com/chat/ui/widgets/selection-input).
+//
+// Chat apps can process the value of items that users select or input. For
+// details about working with form inputs, see [Receive form
+// data](https://developers.google.com/chat/ui/read-form-data).
+//
+// To collect undefined or abstract data from users, use
+// the [TextInput][google.apps.card.v1.TextInput] widget.
+//
+// [Google Workspace Add-ons
+// and Chat apps](https://developers.google.com/workspace/extend):
+message SelectionInput {
+  // The format for the items that users can select. Different options support
+  // different types of interactions. For example, users can select multiple
+  // checkboxes, but can only select one item from a dropdown menu.
+ // + // Each selection input supports one type of selection. Mixing checkboxes + // and switches, for example, isn't supported. + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + enum SelectionType { + // A set of checkboxes. Users can select one or more checkboxes. + CHECK_BOX = 0; + + // A set of radio buttons. Users can select one radio button. + RADIO_BUTTON = 1; + + // A set of switches. Users can turn on one or more switches. + SWITCH = 2; + + // A dropdown menu. Users can select one item from the menu. + DROPDOWN = 3; + + // A multiselect menu for static or dynamic data. From the menu bar, + // users select one or more items. Users can also input values to populate + // dynamic data. For example, users can start typing the name of a Google + // Chat space and the widget autosuggests the space. + // + // To populate items for a multiselect menu, you can use one of the + // following types of data sources: + // + // * Static data: Items are specified as `SelectionItem` objects in the + // widget. Up to 100 items. + // * Google Workspace data: Items are populated using data from Google + // Workspace, such as Google Workspace users or Google Chat spaces. + // * External data: Items are populated from an external data + // source outside of Google Workspace. + // + // For examples of how to implement multiselect menus, see the + // [`SelectionInput` widget + // page](https://developers.google.com/chat/ui/widgets/selection-input#multiselect-menu). + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + // multiselect for Google Workspace Add-ons are in + // [Developer Preview](https://developers.google.com/workspace/preview). + MULTI_SELECT = 4; + } + + // An item that users can select in a selection input, such as a checkbox + // or switch. 
+ // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + message SelectionItem { + // The text that identifies or describes the item to users. + string text = 1; + + // The value associated with this item. The client should use this as a form + // input value. + // + // For details about working with form inputs, see [Receive form + // data](https://developers.google.com/chat/ui/read-form-data). + string value = 2; + + // Whether the item is selected by default. If the selection input only + // accepts one value (such as for radio buttons or a dropdown menu), only + // set this field for one item. + bool selected = 3; + + // For multiselect menus, the URL for the icon displayed next to + // the item's `text` field. Supports PNG and JPEG files. Must be an `HTTPS` + // URL. For example, + // `https://developers.google.com/chat/images/quickstart-app-avatar.png`. + string start_icon_uri = 4; + + // For multiselect menus, a text description or label that's + // displayed below the item's `text` field. + string bottom_text = 5; + } + + // For a + // [`SelectionInput`][google.apps.card.v1.SelectionInput] widget that uses a + // multiselect menu, a data source from Google Workspace. Used to populate + // items in a multiselect menu. + // + // [Google Chat apps](https://developers.google.com/chat): + message PlatformDataSource { + // A data source shared by all [Google Workspace + // applications] + // (https://developers.google.com/chat/api/reference/rest/v1/HostApp). + // + // [Google Chat apps](https://developers.google.com/chat): + enum CommonDataSource { + // Default value. Don't use. + UNKNOWN = 0; + + // Google Workspace users. The user can only view and select users from + // their Google Workspace organization. + USER = 1; + } + + // The data source. + oneof data_source { + // A data source shared by all Google Workspace applications, such as + // users in a Google Workspace organization. 
+ CommonDataSource common_data_source = 1; + } + } + + // The name that identifies the selection input in a form input event. + // + // For details about working with form inputs, see [Receive form + // data](https://developers.google.com/chat/ui/read-form-data). + string name = 1; + + // The text that appears above the selection input field in the user + // interface. + // + // Specify text that helps the user enter the information your app needs. + // For example, if users are selecting the urgency of a work ticket from a + // drop-down menu, the label might be "Urgency" or "Select urgency". + string label = 2; + + // The type of items that are displayed to users in a `SelectionInput` widget. + // Selection types support different types of interactions. For example, users + // can select one or more checkboxes, but they can only select one value from + // a dropdown menu. + SelectionType type = 3; + + // An array of selectable items. For example, an array of radio buttons or + // checkboxes. Supports up to 100 items. + repeated SelectionItem items = 4; + + // If specified, the form is submitted when the selection changes. If not + // specified, you must specify a separate button that submits the form. + // + // For details about working with form inputs, see [Receive form + // data](https://developers.google.com/chat/ui/read-form-data). + Action on_change_action = 5; + + // For multiselect menus, the maximum number of items that a user can select. + // Minimum value is 1 item. If unspecified, defaults to 3 items. + int32 multi_select_max_selected_items = 6; + + // For multiselect menus, the number of text characters that a user inputs + // before the Chat app queries autocomplete and displays suggested items + // in the menu. + // + // If unspecified, defaults to 0 characters for static data sources and 3 + // characters for external data sources. 
+  int32 multi_select_min_query_length = 7;
+
+  // For a multiselect menu, the data source that populates
+  // selection items.
+  //
+  // [Google Chat apps](https://developers.google.com/chat):
+  oneof multi_select_data_source {
+    // An external data source, such as a relational database.
+    Action external_data_source = 8;
+
+    // A data source from Google Workspace.
+    PlatformDataSource platform_data_source = 9;
+  }
+}
+
+// Lets users input a date, a time, or both a date and a time. For an example in
+// Google Chat apps, see [Date time
+// picker](https://developers.google.com/chat/ui/widgets/date-time-picker).
+//
+// Users can input text or use the picker to select dates and times. If users
+// input an invalid date or time, the picker shows an error that prompts users
+// to input the information correctly.
+//
+// [Google Workspace
+// Add-ons and Chat apps](https://developers.google.com/workspace/extend):
+message DateTimePicker {
+  // The format for the date and time in the `DateTimePicker` widget.
+  // Determines whether users can input a date, a time, or both a date and time.
+  //
+  // [Google Workspace Add-ons and Chat
+  // apps](https://developers.google.com/workspace/extend):
+  enum DateTimePickerType {
+    // Users input a date and time.
+    DATE_AND_TIME = 0;
+
+    // Users input a date.
+    DATE_ONLY = 1;
+
+    // Users input a time.
+    TIME_ONLY = 2;
+  }
+
+  // The name by which the `DateTimePicker` is identified in a form input event.
+  //
+  // For details about working with form inputs, see [Receive form
+  // data](https://developers.google.com/chat/ui/read-form-data).
+  string name = 1;
+
+  // The text that prompts users to input a date, a time, or a date and time.
+  // For example, if users are scheduling an appointment, use a label such as
+  // `Appointment date` or `Appointment date and time`.
+  string label = 2;
+
+  // Whether the widget supports inputting a date, a time, or the date and time.
+ DateTimePickerType type = 3; + + // The default value displayed in the widget, in milliseconds since [Unix + // epoch time](https://en.wikipedia.org/wiki/Unix_time). + // + // Specify the value based on the type of picker (`DateTimePickerType`): + // + // * `DATE_AND_TIME`: a calendar date and time in UTC. For example, to + // represent January 1, 2023 at 12:00 PM UTC, use `1672574400000`. + // * `DATE_ONLY`: a calendar date at 00:00:00 UTC. For example, to represent + // January 1, 2023, use `1672531200000`. + // * `TIME_ONLY`: a time in UTC. For example, to represent 12:00 PM, use + // `43200000` (or `12 * 60 * 60 * 1000`). + int64 value_ms_epoch = 4; + + // The number representing the time zone offset from UTC, in minutes. + // If set, the `value_ms_epoch` is displayed in the specified time zone. + // If unset, the value defaults to the user's time zone setting. + int32 timezone_offset_date = 5; + + // Triggered when the user clicks **Save** or **Clear** from the + // `DateTimePicker` interface. + Action on_change_action = 6; +} + +// A text, icon, or text and icon button that users can click. For an example in +// Google Chat apps, see +// [Button list](https://developers.google.com/chat/ui/widgets/button-list). +// +// To make an image a clickable button, specify an +// [`Image`][google.apps.card.v1.Image] (not an +// [`ImageComponent`][google.apps.card.v1.ImageComponent]) and set an +// `onClick` action. +// +// [Google Workspace +// Add-ons and Chat apps](https://developers.google.com/workspace/extend): +message Button { + // The text displayed inside the button. + string text = 1; + + // The icon image. If both `icon` and `text` are set, then the icon appears + // before the text. + Icon icon = 2; + + // If set, the button is filled with a solid background color and the font + // color changes to maintain contrast with the background color. For example, + // setting a blue background likely results in white text. 
+ // + // If unset, the image background is white and the font color is blue. + // + // For red, green, and blue, the value of each field is a `float` number that + // you can express in either of two ways: as a number between 0 and 255 + // divided by 255 (153/255), or as a value between 0 and 1 (0.6). 0 represents + // the absence of a color and 1 or 255/255 represent the full presence of that + // color on the RGB scale. + // + // Optionally set `alpha`, which sets a level of transparency using this + // equation: + // + // ``` + // pixel color = alpha * (this color) + (1.0 - alpha) * (background color) + // ``` + // + // For `alpha`, a value of `1` corresponds with a solid color, and a value of + // `0` corresponds with a completely transparent color. + // + // For example, the following color represents a half transparent red: + // + // ``` + // "color": { + // "red": 1, + // "green": 0, + // "blue": 0, + // "alpha": 0.5 + // } + // ``` + google.type.Color color = 3; + + // Required. The action to perform when a user clicks the button, such as + // opening a hyperlink or running a custom function. + OnClick on_click = 4; + + // If `true`, the button is displayed in an inactive state and doesn't respond + // to user actions. + bool disabled = 5; + + // The alternative text that's used for accessibility. + // + // Set descriptive text that lets users know what the button does. For + // example, if a button opens a hyperlink, you might write: "Opens a new + // browser tab and navigates to the Google Chat developer documentation at + // https://developers.google.com/chat". + string alt_text = 6; +} + +// An icon displayed in a widget on a card. For an example in Google Chat apps, +// see [Icon](https://developers.google.com/chat/ui/widgets/icon). +// +// Supports +// [built-in](https://developers.google.com/chat/format-messages#builtinicons) +// and +// [custom](https://developers.google.com/chat/format-messages#customicons) +// icons. 
+// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +message Icon { + // The icon displayed in the widget on the card. + oneof icons { + // Display one of the built-in icons provided by Google Workspace. + // + // For example, to display an airplane icon, specify `AIRPLANE`. + // For a bus, specify `BUS`. + // + // For a full list of supported icons, see [built-in + // icons](https://developers.google.com/chat/format-messages#builtinicons). + string known_icon = 1; + + // Display a custom icon hosted at an HTTPS URL. + // + // For example: + // + // ``` + // "iconUrl": + // "https://developers.google.com/chat/images/quickstart-app-avatar.png" + // ``` + // + // Supported file types include `.png` and `.jpg`. + string icon_url = 2; + } + + // Optional. A description of the icon used for accessibility. + // If unspecified, the default value `Button` is provided. As a best practice, + // you should set a helpful description for what the icon displays, and if + // applicable, what it does. For example, `A user's account portrait`, or + // `Opens a new browser tab and navigates to the Google Chat developer + // documentation at https://developers.google.com/chat`. + // + // If the icon is set in a [`Button`][google.apps.card.v1.Button], the + // `altText` appears as helper text when the user hovers over the button. + // However, if the button also sets `text`, the icon's `altText` is ignored. + string alt_text = 3; + + // The crop style applied to the image. In some cases, applying a + // `CIRCLE` crop causes the image to be drawn larger than a built-in + // icon. + Widget.ImageType image_type = 4; +} + +// Represents the crop style applied to an image. 
+// +// [Google Workspace Add-ons and +// Chat apps](https://developers.google.com/workspace/extend): +// +// For example, here's how to apply a 16:9 aspect ratio: +// +// ``` +// cropStyle { +// "type": "RECTANGLE_CUSTOM", +// "aspectRatio": 16/9 +// } +// ``` +message ImageCropStyle { + // Represents the crop style applied to an image. + // + // [Google Workspace Add-ons + // and Chat apps](https://developers.google.com/workspace/extend): + enum ImageCropType { + // Don't use. Unspecified. + IMAGE_CROP_TYPE_UNSPECIFIED = 0; + + // Default value. Applies a square crop. + SQUARE = 1; + + // Applies a circular crop. + CIRCLE = 2; + + // Applies a rectangular crop with a custom aspect ratio. Set the custom + // aspect ratio with `aspectRatio`. + RECTANGLE_CUSTOM = 3; + + // Applies a rectangular crop with a 4:3 aspect ratio. + RECTANGLE_4_3 = 4; + } + + // The crop type. + ImageCropType type = 1; + + // The aspect ratio to use if the crop type is `RECTANGLE_CUSTOM`. + // + // For example, here's how to apply a 16:9 aspect ratio: + // + // ``` + // cropStyle { + // "type": "RECTANGLE_CUSTOM", + // "aspectRatio": 16/9 + // } + // ``` + double aspect_ratio = 2; +} + +// The style options for the border of a card or widget, including the border +// type and color. +// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +message BorderStyle { + // Represents the border types applied to widgets. + // + // [Google Workspace Add-ons + // and Chat apps](https://developers.google.com/workspace/extend): + enum BorderType { + // Don't use. Unspecified. + BORDER_TYPE_UNSPECIFIED = 0; + + // Default value. No border. + NO_BORDER = 1; + + // Outline. + STROKE = 2; + } + + // The border type. + BorderType type = 1; + + // The colors to use when the type is `BORDER_TYPE_STROKE`. + google.type.Color stroke_color = 2; + + // The corner radius for the border. + int32 corner_radius = 3; +} + +// Represents an image. 
+// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +message ImageComponent { + // The image URL. + string image_uri = 1; + + // The accessibility label for the image. + string alt_text = 2; + + // The crop style to apply to the image. + ImageCropStyle crop_style = 3; + + // The border style to apply to the image. + BorderStyle border_style = 4; +} + +// Displays a grid with a collection of items. Items can only include text or +// images. For responsive columns, or to include more than text or images, use +// [`Columns`][google.apps.card.v1.Columns]. For an example in Google Chat apps, +// see [Grid](https://developers.google.com/chat/ui/widgets/grid). +// +// A grid supports any number of columns and items. The number of rows is +// determined by items divided by columns. A grid with +// 10 items and 2 columns has 5 rows. A grid with 11 items and 2 columns +// has 6 rows. +// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +// +// For example, the following JSON creates a 2 column grid with a single +// item: +// +// ``` +// "grid": { +// "title": "A fine collection of items", +// "columnCount": 2, +// "borderStyle": { +// "type": "STROKE", +// "cornerRadius": 4 +// }, +// "items": [ +// { +// "image": { +// "imageUri": "https://www.example.com/image.png", +// "cropStyle": { +// "type": "SQUARE" +// }, +// "borderStyle": { +// "type": "STROKE" +// } +// }, +// "title": "An item", +// "textAlignment": "CENTER" +// } +// ], +// "onClick": { +// "openLink": { +// "url": "https://www.example.com" +// } +// } +// } +// ``` +message Grid { + // Represents an item in a grid layout. Items can contain text, an image, or + // both text and an image. + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + message GridItem { + // Represents the various layout options available for a grid item. 
+ // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + enum GridItemLayout { + // Don't use. Unspecified. + GRID_ITEM_LAYOUT_UNSPECIFIED = 0; + + // The title and subtitle are shown below the grid item's image. + TEXT_BELOW = 1; + + // The title and subtitle are shown above the grid item's image. + TEXT_ABOVE = 2; + } + + // A user-specified identifier for this grid item. This identifier is + // returned in the parent grid's `onClick` callback parameters. + string id = 1; + + // The image that displays in the grid item. + ImageComponent image = 2; + + // The grid item's title. + string title = 3; + + // The grid item's subtitle. + string subtitle = 4; + + // The layout to use for the grid item. + GridItemLayout layout = 9; + } + + // The text that displays in the grid header. + string title = 1; + + // The items to display in the grid. + repeated GridItem items = 2; + + // The border style to apply to each grid item. + BorderStyle border_style = 3; + + // The number of columns to display in the grid. A default value + // is used if this field isn't specified, and that default value is + // different depending on where the grid is shown (dialog versus companion). + int32 column_count = 4; + + // This callback is reused by each individual grid item, but with the + // item's identifier and index in the items list added to the callback's + // parameters. + OnClick on_click = 5; +} + +// The `Columns` widget displays up to 2 columns in a card or dialog. You can +// add widgets to each column; the widgets appear in the order that they are +// specified. For an example in Google Chat apps, see +// [Columns](https://developers.google.com/chat/ui/widgets/columns). +// +// The height of each column is determined by the taller column. For example, if +// the first column is taller than the second column, both columns have the +// height of the first column. 
Because each column can contain a different +// number of widgets, you can't define rows or align widgets between the +// columns. +// +// Columns are displayed side-by-side. You can customize the width of each +// column using the `HorizontalSizeStyle` field. If the user's +// screen width is too narrow, the second column wraps below the first: +// +// * On web, the second column wraps if the screen width is less than or equal +// to 480 pixels. +// * On iOS devices, the second column wraps if the screen width is +// less than or equal to 300 pt. +// * On Android devices, the second column wraps if the screen width is +// less than or equal to 320 dp. +// +// To include more than 2 columns, or to use rows, use the +// [`Grid`][google.apps.card.v1.Grid] widget. +// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +// Columns for Google Workspace Add-ons are in +// [Developer Preview](https://developers.google.com/workspace/preview). +message Columns { + // A column. + // + // [Google Chat apps](https://developers.google.com/chat): + message Column { + // Specifies how a column fills the width of the card. The width of each + // column depends on both the `HorizontalSizeStyle` and the width of the + // widgets within the column. + // + // [Google Chat apps](https://developers.google.com/chat): + enum HorizontalSizeStyle { + // Don't use. Unspecified. + HORIZONTAL_SIZE_STYLE_UNSPECIFIED = 0; + + // Default value. Column fills the available space, up to 70% of the + // card's width. If both columns are set to `FILL_AVAILABLE_SPACE`, each + // column fills 50% of the space. + FILL_AVAILABLE_SPACE = 1; + + // Column fills the least amount of space possible and no more than 30% of + // the card's width. + FILL_MINIMUM_SPACE = 2; + } + + // Specifies whether widgets align to the top, bottom, or center of a + // column. 
+ // + // [Google Chat apps](https://developers.google.com/chat): + enum VerticalAlignment { + // Don't use. Unspecified. + VERTICAL_ALIGNMENT_UNSPECIFIED = 0; + + // Default value. Aligns widgets to the center of a column. + CENTER = 1; + + // Aligns widgets to the top of a column. + TOP = 2; + + // Aligns widgets to the bottom of a column. + BOTTOM = 3; + } + + // The supported widgets that you can include in a column. + // + // [Google Chat apps](https://developers.google.com/chat): + message Widgets { + oneof data { + // [TextParagraph][google.apps.card.v1.TextParagraph] widget. + TextParagraph text_paragraph = 1; + + // [Image][google.apps.card.v1.Image] widget. + Image image = 2; + + // [DecoratedText][google.apps.card.v1.DecoratedText] widget. + DecoratedText decorated_text = 3; + + // [ButtonList][google.apps.card.v1.ButtonList] widget. + ButtonList button_list = 4; + + // [TextInput][google.apps.card.v1.TextInput] widget. + TextInput text_input = 5; + + // [SelectionInput][google.apps.card.v1.SelectionInput] widget. + SelectionInput selection_input = 6; + + // [DateTimePicker][google.apps.card.v1.DateTimePicker] widget. + DateTimePicker date_time_picker = 7; + } + } + + // Specifies how a column fills the width of the card. + // + // [Google Chat apps](https://developers.google.com/chat): + HorizontalSizeStyle horizontal_size_style = 1; + + // Specifies whether widgets align to the left, right, or center of a + // column. + Widget.HorizontalAlignment horizontal_alignment = 2; + + // Specifies whether widgets align to the top, bottom, or center of a + // column. + // + // [Google Chat apps](https://developers.google.com/chat): + VerticalAlignment vertical_alignment = 3; + + // An array of widgets included in a column. Widgets appear in the order + // that they are specified. + repeated Widgets widgets = 4; + } + + // An array of columns. You can include up to 2 columns in a card or dialog. 
+ repeated Column column_items = 2; +} + +// Represents how to respond when users click an interactive element on +// a card, such as a button. +// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +message OnClick { + oneof data { + // If specified, an action is triggered by this `onClick`. + Action action = 1; + + // If specified, this `onClick` triggers an open link action. + OpenLink open_link = 2; + + // An add-on triggers this action when the action needs to open a + // link. This differs from the `open_link` above in that this needs to talk + // to server to get the link. Thus some preparation work is required for + // web client to do before the open link action response comes back. + // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + Action open_dynamic_link_action = 3; + + // A new card is pushed to the card stack after clicking if specified. + // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + Card card = 4; + } +} + +// Represents an `onClick` event that opens a hyperlink. +// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +message OpenLink { + // When an `OnClick` action opens a link, then the client can either open it + // as a full-size window (if that's the frame used by the client), or an + // overlay (such as a pop-up). The implementation depends on the client + // platform capabilities, and the value selected might be ignored if the + // client doesn't support it. `FULL_SIZE` is supported by all clients. + // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + enum OpenAs { + // The link opens as a full-size window (if that's the frame used by the + // client). + FULL_SIZE = 0; + + // The link opens as an overlay, such as a pop-up. + OVERLAY = 1; + } + + // What the client does when a link opened by an `OnClick` action is closed. 
+ // + // Implementation depends on client platform capabilities. For example, a web + // browser might open a link in a pop-up window with an `OnClose` handler. + // + // If both `OnOpen` and `OnClose` handlers are set, and the client platform + // can't support both values, `OnClose` takes precedence. + // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + enum OnClose { + // Default value. The card doesn't reload; nothing happens. + NOTHING = 0; + + // Reloads the card after the child window closes. + // + // If used in conjunction with + // [`OpenAs.OVERLAY`](https://developers.google.com/workspace/add-ons/reference/rpc/google.apps.card.v1#openas), + // the child window acts as a modal dialog and the parent card is blocked + // until the child window closes. + RELOAD = 1; + } + + // The URL to open. + string url = 1; + + // How to open a link. + // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + OpenAs open_as = 2; + + // Whether the client forgets about a link after opening it, or observes it + // until the window closes. + // + // [Google Workspace + // Add-ons](https://developers.google.com/workspace/add-ons): + OnClose on_close = 3; +} + +// An action that describes the behavior when the form is submitted. +// For example, you can invoke an Apps Script script to handle the form. +// If the action is triggered, the form values are sent to the server. +// +// [Google Workspace Add-ons and Chat +// apps](https://developers.google.com/workspace/extend): +message Action { + // List of string parameters to supply when the action method is invoked. + // For example, consider three snooze buttons: snooze now, snooze one day, + // or snooze next week. You might use `action method = snooze()`, passing the + // snooze type and snooze time in the list of string parameters. 
+ // + // To learn more, see + // [`CommonEventObject`](https://developers.google.com/chat/api/reference/rest/v1/Event#commoneventobject). + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + message ActionParameter { + // The name of the parameter for the action script. + string key = 1; + + // The value of the parameter. + string value = 2; + } + + // Specifies the loading indicator that the action displays while + // making the call to the action. + // + // [Google Workspace Add-ons and Chat + // apps](https://developers.google.com/workspace/extend): + enum LoadIndicator { + // Displays a spinner to indicate that content is loading. + SPINNER = 0; + + // Nothing is displayed. + NONE = 1; + } + + // Optional. Required when opening a + // [dialog](https://developers.google.com/chat/how-tos/dialogs). + // + // What to do in response to an interaction with a user, such as a user + // clicking a button in a card message. + // + // If unspecified, the app responds by executing an `action`—like opening a + // link or running a function—as normal. + // + // By specifying an `interaction`, the app can respond in special interactive + // ways. For example, by setting `interaction` to `OPEN_DIALOG`, the app can + // open a [dialog](https://developers.google.com/chat/how-tos/dialogs). + // + // When specified, a loading indicator isn't shown. If specified for + // an add-on, the entire card is stripped and nothing is shown in the client. + // + // [Google Chat apps](https://developers.google.com/chat): + enum Interaction { + // Default value. The `action` executes as normal. + INTERACTION_UNSPECIFIED = 0; + + // Opens a [dialog](https://developers.google.com/chat/how-tos/dialogs), a + // windowed, card-based interface that Chat apps use to interact with users. + // + // Only supported by Chat apps in response to button-clicks on card + // messages. 
 If specified for
+    // an add-on, the entire card is stripped and nothing is shown in the
+    // client.
+    //
+    // [Google Chat apps](https://developers.google.com/chat):
+    OPEN_DIALOG = 1;
+  }
+
+  // A custom function to invoke when the containing element is
+  // clicked or otherwise activated.
+  //
+  // For example usage, see [Create interactive
+  // cards](https://developers.google.com/chat/how-tos/cards-onclick).
+  string function = 1;
+
+  // List of action parameters.
+  repeated ActionParameter parameters = 2;
+
+  // Specifies the loading indicator that the action displays while
+  // making the call to the action.
+  LoadIndicator load_indicator = 3;
+
+  // Indicates whether form values persist after the action. The default value
+  // is `false`.
+  //
+  // If `true`, form values remain after the action is triggered. To let the
+  // user make changes while the action is being processed, set
+  // [`LoadIndicator`](https://developers.google.com/workspace/add-ons/reference/rpc/google.apps.card.v1#loadindicator)
+  // to `NONE`. For [card
+  // messages](https://developers.google.com/chat/api/guides/v1/messages/create#create)
+  // in Chat apps, you must also set the action's
+  // [`ResponseType`](https://developers.google.com/chat/api/reference/rest/v1/spaces.messages#responsetype)
+  // to `UPDATE_MESSAGE` and use the same
+  // [`card_id`](https://developers.google.com/chat/api/reference/rest/v1/spaces.messages#CardWithId)
+  // from the card that contained the action.
+  //
+  // If `false`, the form values are cleared when the action is triggered.
+  // To prevent the user from making changes while the action is being
+  // processed, set
+  // [`LoadIndicator`](https://developers.google.com/workspace/add-ons/reference/rpc/google.apps.card.v1#loadindicator)
+  // to `SPINNER`.
+  bool persist_values = 4;
+
+  // Optional. Required when opening a
+  // [dialog](https://developers.google.com/chat/how-tos/dialogs).
+ // + // What to do in response to an interaction with a user, such as a user + // clicking a button in a card message. + // + // If unspecified, the app responds by executing an `action`—like opening a + // link or running a function—as normal. + // + // By specifying an `interaction`, the app can respond in special interactive + // ways. For example, by setting `interaction` to `OPEN_DIALOG`, the app can + // open a [dialog](https://developers.google.com/chat/how-tos/dialogs). When + // specified, a loading indicator isn't shown. If specified for + // an add-on, the entire card is stripped and nothing is shown in the client. + // + // [Google Chat apps](https://developers.google.com/chat): + Interaction interaction = 5; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/cloud/audit/audit_log.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/cloud/audit/audit_log.proto new file mode 100644 index 000000000000..ed4ae5f48f1b --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/cloud/audit/audit_log.proto @@ -0,0 +1,353 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.audit; + +import "google/api/field_behavior.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/struct.proto"; +import "google/rpc/context/attribute_context.proto"; +import "google/rpc/status.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/cloud/audit;audit"; +option java_multiple_files = true; +option java_outer_classname = "AuditLogProto"; +option java_package = "com.google.cloud.audit"; + +// Common audit log format for Google Cloud Platform API operations. +message AuditLog { + // The name of the API service performing the operation. For example, + // `"compute.googleapis.com"`. + string service_name = 7; + + // The name of the service method or operation. + // For API calls, this should be the name of the API method. + // For example, + // + // "google.cloud.bigquery.v2.TableService.InsertTable" + // "google.logging.v2.ConfigServiceV2.CreateSink" + string method_name = 8; + + // The resource or collection that is the target of the operation. + // The name is a scheme-less URI, not including the API service name. + // For example: + // + // "projects/PROJECT_ID/zones/us-central1-a/instances" + // "projects/PROJECT_ID/datasets/DATASET_ID" + string resource_name = 11; + + // The resource location information. + ResourceLocation resource_location = 20; + + // The resource's original state before mutation. Present only for + // operations which have successfully modified the targeted resource(s). + // In general, this field should contain all changed fields, except those + // that are already been included in `request`, `response`, `metadata` or + // `service_data` fields. + // When the JSON object represented here has a proto equivalent, + // the proto name will be indicated in the `@type` property. + google.protobuf.Struct resource_original_state = 19; + + // The number of items returned from a List or Query API method, + // if applicable. 
+ int64 num_response_items = 12; + + // The status of the overall operation. + google.rpc.Status status = 2; + + // Authentication information. + AuthenticationInfo authentication_info = 3; + + // Authorization information. If there are multiple + // resources or permissions involved, then there is + // one AuthorizationInfo element for each {resource, permission} tuple. + repeated AuthorizationInfo authorization_info = 9; + + // Indicates the policy violations for this request. If the request + // is denied by the policy, violation information will be logged + // here. + PolicyViolationInfo policy_violation_info = 25; + + // Metadata about the operation. + RequestMetadata request_metadata = 4; + + // The operation request. This may not include all request parameters, + // such as those that are too large, privacy-sensitive, or duplicated + // elsewhere in the log record. + // It should never include user-generated data, such as file contents. + // When the JSON object represented here has a proto equivalent, the proto + // name will be indicated in the `@type` property. + google.protobuf.Struct request = 16; + + // The operation response. This may not include all response elements, + // such as those that are too large, privacy-sensitive, or duplicated + // elsewhere in the log record. + // It should never include user-generated data, such as file contents. + // When the JSON object represented here has a proto equivalent, the proto + // name will be indicated in the `@type` property. + google.protobuf.Struct response = 17; + + // Other service-specific data about the request, response, and other + // information associated with the current audited event. + google.protobuf.Struct metadata = 18; + + // Deprecated. Use the `metadata` field instead. + // Other service-specific data about the request, response, and other + // activities. + google.protobuf.Any service_data = 15 [deprecated = true]; +} + +// Authentication information for the operation. 
+message AuthenticationInfo { + // The email address of the authenticated user (or service account on behalf + // of third party principal) making the request. For third party identity + // callers, the `principal_subject` field is populated instead of this field. + // For privacy reasons, the principal email address is sometimes redacted. + // For more information, see [Caller identities in audit + // logs](https://cloud.google.com/logging/docs/audit#user-id). + string principal_email = 1; + + // The authority selector specified by the requestor, if any. + // It is not guaranteed that the principal was allowed to use this authority. + string authority_selector = 2; + + // The third party identification (if any) of the authenticated user making + // the request. + // When the JSON object represented here has a proto equivalent, the proto + // name will be indicated in the `@type` property. + google.protobuf.Struct third_party_principal = 4; + + // The name of the service account key used to create or exchange + // credentials for authenticating the service account making the request. + // This is a scheme-less URI full resource name. For example: + // + // "//iam.googleapis.com/projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}/keys/{key}" + string service_account_key_name = 5; + + // Identity delegation history of an authenticated service account that makes + // the request. It contains information on the real authorities that try to + // access GCP resources by delegating on a service account. When multiple + // authorities present, they are guaranteed to be sorted based on the original + // ordering of the identity delegation events. + repeated ServiceAccountDelegationInfo service_account_delegation_info = 6; + + // String representation of identity of requesting party. + // Populated for both first and third party identities. + string principal_subject = 8; +} + +// Authorization information for the operation. 
+message AuthorizationInfo { + // The resource being accessed, as a REST-style or cloud resource string. + // For example: + // + // bigquery.googleapis.com/projects/PROJECTID/datasets/DATASETID + // or + // projects/PROJECTID/datasets/DATASETID + string resource = 1; + + // The required IAM permission. + string permission = 2; + + // Whether or not authorization for `resource` and `permission` + // was granted. + bool granted = 3; + + // Resource attributes used in IAM condition evaluation. This field contains + // resource attributes like resource type and resource name. + // + // To get the whole view of the attributes used in IAM + // condition evaluation, the user must also look into + // `AuditLog.request_metadata.request_attributes`. + google.rpc.context.AttributeContext.Resource resource_attributes = 5; +} + +// Metadata about the request. +message RequestMetadata { + // The IP address of the caller. + // For a caller from the internet, this will be the public IPv4 or IPv6 + // address. For calls made from inside Google's internal production network + // from one GCP service to another, `caller_ip` will be redacted to "private". + // For a caller from a Compute Engine VM with a external IP address, + // `caller_ip` will be the VM's external IP address. For a caller from a + // Compute Engine VM without a external IP address, if the VM is in the same + // organization (or project) as the accessed resource, `caller_ip` will be the + // VM's internal IPv4 address, otherwise `caller_ip` will be redacted to + // "gce-internal-ip". See https://cloud.google.com/compute/docs/vpc/ for more + // information. + string caller_ip = 1; + + // The user agent of the caller. + // This information is not authenticated and should be treated accordingly. + // For example: + // + // + `google-api-python-client/1.4.0`: + // The request was made by the Google API client for Python. 
+ // + `Cloud SDK Command Line Tool apitools-client/1.0 gcloud/0.9.62`: + // The request was made by the Google Cloud SDK CLI (gcloud). + // + `AppEngine-Google; (+http://code.google.com/appengine; appid: + // s~my-project`: + // The request was made from the `my-project` App Engine app. + string caller_supplied_user_agent = 2; + + // The network of the caller. + // Set only if the network host project is part of the same GCP organization + // (or project) as the accessed resource. + // See https://cloud.google.com/compute/docs/vpc/ for more information. + // This is a scheme-less URI full resource name. For example: + // + // "//compute.googleapis.com/projects/PROJECT_ID/global/networks/NETWORK_ID" + string caller_network = 3; + + // Request attributes used in IAM condition evaluation. This field contains + // request attributes like request time and access levels associated with + // the request. + // + // + // To get the whole view of the attributes used in IAM + // condition evaluation, the user must also look into + // `AuditLog.authentication_info.resource_attributes`. + google.rpc.context.AttributeContext.Request request_attributes = 7; + + // The destination of a network activity, such as accepting a TCP connection. + // In a multi hop network activity, the destination represents the receiver of + // the last hop. Only two fields are used in this message, Peer.port and + // Peer.ip. These fields are optionally populated by those services utilizing + // the IAM condition feature. + google.rpc.context.AttributeContext.Peer destination_attributes = 8; +} + +// Location information about a resource. +message ResourceLocation { + // The locations of a resource after the execution of the operation. + // Requests to create or delete a location based resource must populate + // the 'current_locations' field and not the 'original_locations' field. 
+  // For example:
+  //
+  //     "europe-west1-a"
+  //     "us-east1"
+  //     "nam3"
+  repeated string current_locations = 1;
+
+  // The locations of a resource prior to the execution of the operation.
+  // Requests that mutate the resource's location must populate both the
+  // 'original_locations' as well as the 'current_locations' fields.
+  // For example:
+  //
+  //     "europe-west1-a"
+  //     "us-east1"
+  //     "nam3"
+  repeated string original_locations = 2;
+}
+
+// Identity delegation history of an authenticated service account.
+message ServiceAccountDelegationInfo {
+  // First party identity principal.
+  message FirstPartyPrincipal {
+    // The email address of a Google account.
+    string principal_email = 1;
+
+    // Metadata about the service that uses the service account.
+    google.protobuf.Struct service_metadata = 2;
+  }
+
+  // Third party identity principal.
+  message ThirdPartyPrincipal {
+    // Metadata about third party identity.
+    google.protobuf.Struct third_party_claims = 1;
+  }
+
+  // A string representing the principal_subject associated with the identity.
+  // For most identities, the format will be
+  // `principal://iam.googleapis.com/{identity pool name}/subject/{subject}`
+  // except for some GKE identities (GKE_WORKLOAD, FREEFORM, GKE_HUB_WORKLOAD)
+  // that are still in the legacy format `serviceAccount:{identity pool
+  // name}[{subject}]`
+  string principal_subject = 3;
+
+  // Entity that creates credentials for service account and assumes its
+  // identity for authentication.
+  oneof Authority {
+    // First party (Google) identity as the real authority.
+    FirstPartyPrincipal first_party_principal = 1;
+
+    // Third party identity as the real authority.
+    ThirdPartyPrincipal third_party_principal = 2;
+  }
+}
+
+// Information related to policy violations for this request.
+message PolicyViolationInfo {
+  // Indicates the orgpolicy violations for this resource.
+ OrgPolicyViolationInfo org_policy_violation_info = 1; +} + +// Represents OrgPolicy Violation information. +message OrgPolicyViolationInfo { + // Optional. Resource payload that is currently in scope and is subjected to orgpolicy + // conditions. This payload may be the subset of the actual Resource that may + // come in the request. This payload should not contain any core content. + google.protobuf.Struct payload = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Resource type that the orgpolicy is checked against. + // Example: compute.googleapis.com/Instance, store.googleapis.com/bucket + string resource_type = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Tags referenced on the resource at the time of evaluation. These also + // include the federated tags, if they are supplied in the CheckOrgPolicy + // or CheckCustomConstraints Requests. + // + // Optional field as of now. These tags are the Cloud tags that are + // available on the resource during the policy evaluation and will + // be available as part of the OrgPolicy check response for logging purposes. + map resource_tags = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Policy violations + repeated ViolationInfo violation_info = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Provides information about the Policy violation info for this request. +message ViolationInfo { + // Policy Type enum + enum PolicyType { + // Default value. This value should not be used. + POLICY_TYPE_UNSPECIFIED = 0; + + // Indicates boolean policy constraint + BOOLEAN_CONSTRAINT = 1; + + // Indicates list policy constraint + LIST_CONSTRAINT = 2; + + // Indicates custom policy constraint + CUSTOM_CONSTRAINT = 3; + } + + // Optional. Constraint name + string constraint = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Error message that policy is indicating. + string error_message = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. 
Value that is being checked for the policy. + // This could be in encrypted form (if pii sensitive). + // This field will only be emitted in LIST_POLICY types + string checked_value = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Indicates the type of the policy. + PolicyType policy_type = 4 [(google.api.field_behavior) = OPTIONAL]; +} \ No newline at end of file diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/cloud/extended_operations.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/cloud/extended_operations.proto new file mode 100644 index 000000000000..1477d2d69a61 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/cloud/extended_operations.proto @@ -0,0 +1,150 @@ +// Copyright 2021 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file contains custom annotations that are used by GAPIC generators to +// handle Long Running Operation methods (LRO) that are NOT compliant with +// https://google.aip.dev/151. These annotations are public for technical +// reasons only. Please DO NOT USE them in your protos. 
+syntax = "proto3"; + +package google.cloud; + +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/extendedops;extendedops"; +option java_multiple_files = true; +option java_outer_classname = "ExtendedOperationsProto"; +option java_package = "com.google.cloud"; +option objc_class_prefix = "GAPI"; + +// FieldOptions to match corresponding fields in the initial request, +// polling request and operation response messages. +// +// Example: +// +// In an API-specific operation message: +// +// message MyOperation { +// string http_error_message = 1 [(operation_field) = ERROR_MESSAGE]; +// int32 http_error_status_code = 2 [(operation_field) = ERROR_CODE]; +// string id = 3 [(operation_field) = NAME]; +// Status status = 4 [(operation_field) = STATUS]; +// } +// +// In a polling request message (the one which is used to poll for an LRO +// status): +// +// message MyPollingRequest { +// string operation = 1 [(operation_response_field) = "id"]; +// string project = 2; +// string region = 3; +// } +// +// In an initial request message (the one which starts an LRO): +// +// message MyInitialRequest { +// string my_project = 2 [(operation_request_field) = "project"]; +// string my_region = 3 [(operation_request_field) = "region"]; +// } +// +extend google.protobuf.FieldOptions { + // A field annotation that maps fields in an API-specific Operation object to + // their standard counterparts in google.longrunning.Operation. See + // OperationResponseMapping enum definition. + OperationResponseMapping operation_field = 1149; + + // A field annotation that maps fields in the initial request message + // (the one which started the LRO) to their counterparts in the polling + // request message. For non-standard LRO, the polling response may be missing + // some of the information needed to make a subsequent polling request. 
The + // missing information (for example, project or region ID) is contained in the + // fields of the initial request message that this annotation must be applied + // to. The string value of the annotation corresponds to the name of the + // counterpart field in the polling request message that the annotated field's + // value will be copied to. + string operation_request_field = 1150; + + // A field annotation that maps fields in the polling request message to their + // counterparts in the initial and/or polling response message. The initial + // and the polling methods return an API-specific Operation object. Some of + // the fields from that response object must be reused in the subsequent + // request (like operation name/ID) to fully identify the polled operation. + // This annotation must be applied to the fields in the polling request + // message, the string value of the annotation must correspond to the name of + // the counterpart field in the Operation response object whose value will be + // copied to the annotated field. + string operation_response_field = 1151; +} + +// MethodOptions to identify the actual service and method used for operation +// status polling. +// +// Example: +// +// In a method, which starts an LRO: +// +// service MyService { +// rpc Foo(MyInitialRequest) returns (MyOperation) { +// option (operation_service) = "MyPollingService"; +// } +// } +// +// In a polling method: +// +// service MyPollingService { +// rpc Get(MyPollingRequest) returns (MyOperation) { +// option (operation_polling_method) = true; +// } +// } +extend google.protobuf.MethodOptions { + // A method annotation that maps an LRO method (the one which starts an LRO) + // to the service, which will be used to poll for the operation status. The + // annotation must be applied to the method which starts an LRO, the string + // value of the annotation must correspond to the name of the service used to + // poll for the operation status. 
+ string operation_service = 1249; + + // A method annotation that marks methods that can be used for polling + // operation status (e.g. the MyPollingService.Get(MyPollingRequest) method). + bool operation_polling_method = 1250; +} + +// An enum to be used to mark the essential (for polling) fields in an +// API-specific Operation object. A custom Operation object may contain many +// different fields, but only few of them are essential to conduct a successful +// polling process. +enum OperationResponseMapping { + // Do not use. + UNDEFINED = 0; + + // A field in an API-specific (custom) Operation object which carries the same + // meaning as google.longrunning.Operation.name. + NAME = 1; + + // A field in an API-specific (custom) Operation object which carries the same + // meaning as google.longrunning.Operation.done. If the annotated field is of + // an enum type, `annotated_field_name == EnumType.DONE` semantics should be + // equivalent to `Operation.done == true`. If the annotated field is of type + // boolean, then it should follow the same semantics as Operation.done. + // Otherwise, a non-empty value should be treated as `Operation.done == true`. + STATUS = 2; + + // A field in an API-specific (custom) Operation object which carries the same + // meaning as google.longrunning.Operation.error.code. + ERROR_CODE = 3; + + // A field in an API-specific (custom) Operation object which carries the same + // meaning as google.longrunning.Operation.error.message. 
+ ERROR_MESSAGE = 4; +} \ No newline at end of file diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/cloud/location/locations.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/cloud/location/locations.proto new file mode 100644 index 000000000000..a91766c9d4d4 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/cloud/location/locations.proto @@ -0,0 +1,108 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.location; + +import "google/api/annotations.proto"; +import "google/protobuf/any.proto"; +import "google/api/client.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/cloud/location;location"; +option java_multiple_files = true; +option java_outer_classname = "LocationsProto"; +option java_package = "com.google.cloud.location"; + +// An abstract interface that provides location-related information for +// a service. Service-specific metadata is provided through the +// [Location.metadata][google.cloud.location.Location.metadata] field. 
+service Locations { + option (google.api.default_host) = "cloud.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Lists information about the supported locations for this service. + rpc ListLocations(ListLocationsRequest) returns (ListLocationsResponse) { + option (google.api.http) = { + get: "/v1/{name=locations}" + additional_bindings { + get: "/v1/{name=projects/*}/locations" + } + }; + } + + // Gets information about a location. + rpc GetLocation(GetLocationRequest) returns (Location) { + option (google.api.http) = { + get: "/v1/{name=locations/*}" + additional_bindings { + get: "/v1/{name=projects/*/locations/*}" + } + }; + } +} + +// The request message for [Locations.ListLocations][google.cloud.location.Locations.ListLocations]. +message ListLocationsRequest { + // The resource that owns the locations collection, if applicable. + string name = 1; + + // The standard list filter. + string filter = 2; + + // The standard list page size. + int32 page_size = 3; + + // The standard list page token. + string page_token = 4; +} + +// The response message for [Locations.ListLocations][google.cloud.location.Locations.ListLocations]. +message ListLocationsResponse { + // A list of locations that matches the specified filter in the request. + repeated Location locations = 1; + + // The standard List next-page token. + string next_page_token = 2; +} + +// The request message for [Locations.GetLocation][google.cloud.location.Locations.GetLocation]. +message GetLocationRequest { + // Resource name for the location. + string name = 1; +} + +// A resource that represents Google Cloud Platform location. +message Location { + // Resource name for the location, which may vary between implementations. + // For example: `"projects/example-project/locations/us-east1"` + string name = 1; + + // The canonical id for this location. For example: `"us-east1"`. 
+ string location_id = 4; + + // The friendly name for this location, typically a nearby city name. + // For example, "Tokyo". + string display_name = 5; + + // Cross-service attributes for the location. For example + // + // {"cloud.googleapis.com/region": "us-east1"} + map labels = 2; + + // Service-specific metadata. For example the available capacity at the given + // location. + google.protobuf.Any metadata = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/geo/type/viewport.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/geo/type/viewport.proto new file mode 100644 index 000000000000..ad5029f8360a --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/geo/type/viewport.proto @@ -0,0 +1,69 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.geo.type; + +import "google/type/latlng.proto"; + +option go_package = "google.golang.org/genproto/googleapis/geo/type/viewport;viewport"; +option java_multiple_files = true; +option java_outer_classname = "ViewportProto"; +option java_package = "com.google.geo.type"; +option objc_class_prefix = "GGTP"; + +// A latitude-longitude viewport, represented as two diagonally opposite `low` +// and `high` points. A viewport is considered a closed region, i.e. 
it includes +// its boundary. The latitude bounds must range between -90 to 90 degrees +// inclusive, and the longitude bounds must range between -180 to 180 degrees +// inclusive. Various cases include: +// +// - If `low` = `high`, the viewport consists of that single point. +// +// - If `low.longitude` > `high.longitude`, the longitude range is inverted +// (the viewport crosses the 180 degree longitude line). +// +// - If `low.longitude` = -180 degrees and `high.longitude` = 180 degrees, +// the viewport includes all longitudes. +// +// - If `low.longitude` = 180 degrees and `high.longitude` = -180 degrees, +// the longitude range is empty. +// +// - If `low.latitude` > `high.latitude`, the latitude range is empty. +// +// Both `low` and `high` must be populated, and the represented box cannot be +// empty (as specified by the definitions above). An empty viewport will result +// in an error. +// +// For example, this viewport fully encloses New York City: +// +// { +// "low": { +// "latitude": 40.477398, +// "longitude": -74.259087 +// }, +// "high": { +// "latitude": 40.91618, +// "longitude": -73.70018 +// } +// } +message Viewport { + // Required. The low point of the viewport. + google.type.LatLng low = 1; + + // Required. The high point of the viewport. + google.type.LatLng high = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/logging/type/http_request.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/logging/type/http_request.proto new file mode 100644 index 000000000000..425a09d6e9c6 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/logging/type/http_request.proto @@ -0,0 +1,95 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.logging.type; + +import "google/protobuf/duration.proto"; + +option csharp_namespace = "Google.Cloud.Logging.Type"; +option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype"; +option java_multiple_files = true; +option java_outer_classname = "HttpRequestProto"; +option java_package = "com.google.logging.type"; +option php_namespace = "Google\\Cloud\\Logging\\Type"; +option ruby_package = "Google::Cloud::Logging::Type"; + +// A common proto for logging HTTP requests. Only contains semantics +// defined by the HTTP specification. Product-specific logging +// information MUST be defined in a separate message. +message HttpRequest { + // The request method. Examples: `"GET"`, `"HEAD"`, `"PUT"`, `"POST"`. + string request_method = 1; + + // The scheme (http, https), the host name, the path and the query + // portion of the URL that was requested. + // Example: `"http://example.com/some/info?color=red"`. + string request_url = 2; + + // The size of the HTTP request message in bytes, including the request + // headers and the request body. + int64 request_size = 3; + + // The response code indicating the status of response. + // Examples: 200, 404. + int32 status = 4; + + // The size of the HTTP response message sent back to the client, in bytes, + // including the response headers and the response body. + int64 response_size = 5; + + // The user agent sent by the client. Example: + // `"Mozilla/4.0 (compatible; MSIE 6.0; Windows 98; Q312461; .NET + // CLR 1.0.3705)"`. 
+ string user_agent = 6; + + // The IP address (IPv4 or IPv6) of the client that issued the HTTP + // request. This field can include port information. Examples: + // `"192.168.1.1"`, `"10.0.0.1:80"`, `"FE80::0202:B3FF:FE1E:8329"`. + string remote_ip = 7; + + // The IP address (IPv4 or IPv6) of the origin server that the request was + // sent to. This field can include port information. Examples: + // `"192.168.1.1"`, `"10.0.0.1:80"`, `"FE80::0202:B3FF:FE1E:8329"`. + string server_ip = 13; + + // The referer URL of the request, as defined in + // [HTTP/1.1 Header Field + // Definitions](https://datatracker.ietf.org/doc/html/rfc2616#section-14.36). + string referer = 8; + + // The request processing latency on the server, from the time the request was + // received until the response was sent. + google.protobuf.Duration latency = 14; + + // Whether or not a cache lookup was attempted. + bool cache_lookup = 11; + + // Whether or not an entity was served from cache + // (with or without validation). + bool cache_hit = 9; + + // Whether or not the response was validated with the origin server before + // being served from cache. This field is only meaningful if `cache_hit` is + // True. + bool cache_validated_with_origin_server = 10; + + // The number of HTTP response bytes inserted into cache. Set only when a + // cache fill was attempted. + int64 cache_fill_bytes = 12; + + // Protocol used for the request. 
Examples: "HTTP/1.1", "HTTP/2", "websocket" + string protocol = 15; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/logging/type/log_severity.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/logging/type/log_severity.proto new file mode 100644 index 000000000000..6740125811b0 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/logging/type/log_severity.proto @@ -0,0 +1,71 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.logging.type; + +option csharp_namespace = "Google.Cloud.Logging.Type"; +option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype"; +option java_multiple_files = true; +option java_outer_classname = "LogSeverityProto"; +option java_package = "com.google.logging.type"; +option objc_class_prefix = "GLOG"; +option php_namespace = "Google\\Cloud\\Logging\\Type"; +option ruby_package = "Google::Cloud::Logging::Type"; + +// The severity of the event described in a log entry, expressed as one of the +// standard severity levels listed below. For your reference, the levels are +// assigned the listed numeric values. The effect of using numeric values other +// than those listed is undefined. +// +// You can filter for log entries by severity. 
For example, the following +// filter expression will match log entries with severities `INFO`, `NOTICE`, +// and `WARNING`: +// +// severity > DEBUG AND severity <= WARNING +// +// If you are writing log entries, you should map other severity encodings to +// one of these standard levels. For example, you might map all of Java's FINE, +// FINER, and FINEST levels to `LogSeverity.DEBUG`. You can preserve the +// original severity level in the log entry payload if you wish. +enum LogSeverity { + // (0) The log entry has no assigned severity level. + DEFAULT = 0; + + // (100) Debug or trace information. + DEBUG = 100; + + // (200) Routine information, such as ongoing status or performance. + INFO = 200; + + // (300) Normal but significant events, such as start up, shut down, or + // a configuration change. + NOTICE = 300; + + // (400) Warning events might cause problems. + WARNING = 400; + + // (500) Error events are likely to cause problems. + ERROR = 500; + + // (600) Critical events cause more severe problems or outages. + CRITICAL = 600; + + // (700) A person must take an action immediately. + ALERT = 700; + + // (800) One or more systems are unusable. + EMERGENCY = 800; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/longrunning/operations.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/longrunning/operations.proto new file mode 100644 index 000000000000..c8fda207e6a8 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/longrunning/operations.proto @@ -0,0 +1,247 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.longrunning; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/rpc/status.proto"; +import "google/protobuf/descriptor.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.LongRunning"; +option go_package = "cloud.google.com/go/longrunning/autogen/longrunningpb;longrunningpb"; +option java_multiple_files = true; +option java_outer_classname = "OperationsProto"; +option java_package = "com.google.longrunning"; +option php_namespace = "Google\\LongRunning"; + +extend google.protobuf.MethodOptions { + // Additional information regarding long-running operations. + // In particular, this specifies the types that are returned from + // long-running operations. + // + // Required for methods that return `google.longrunning.Operation`; invalid + // otherwise. + google.longrunning.OperationInfo operation_info = 1049; +} + +// Manages long-running operations with an API service. +// +// When an API method normally takes long time to complete, it can be designed +// to return [Operation][google.longrunning.Operation] to the client, and the client can use this +// interface to receive the real response asynchronously by polling the +// operation resource, or pass the operation resource to another API (such as +// Google Cloud Pub/Sub API) to receive the response. 
Any API service that +// returns long-running operations should implement the `Operations` interface +// so developers can have a consistent client experience. +service Operations { + option (google.api.default_host) = "longrunning.googleapis.com"; + + // Lists operations that match the specified filter in the request. If the + // server doesn't support this method, it returns `UNIMPLEMENTED`. + // + // NOTE: the `name` binding allows API services to override the binding + // to use different resource name schemes, such as `users/*/operations`. To + // override the binding, API services can add a binding such as + // `"/v1/{name=users/*}/operations"` to their service configuration. + // For backwards compatibility, the default name includes the operations + // collection id, however overriding users must ensure the name binding + // is the parent resource, without the operations collection id. + rpc ListOperations(ListOperationsRequest) returns (ListOperationsResponse) { + option (google.api.http) = { + get: "/v1/{name=operations}" + }; + option (google.api.method_signature) = "name,filter"; + } + + // Gets the latest state of a long-running operation. Clients can use this + // method to poll the operation result at intervals as recommended by the API + // service. + rpc GetOperation(GetOperationRequest) returns (Operation) { + option (google.api.http) = { + get: "/v1/{name=operations/**}" + }; + option (google.api.method_signature) = "name"; + } + + // Deletes a long-running operation. This method indicates that the client is + // no longer interested in the operation result. It does not cancel the + // operation. If the server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. 
+ rpc DeleteOperation(DeleteOperationRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=operations/**}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts asynchronous cancellation on a long-running operation. The server + // makes a best effort to cancel the operation, but success is not + // guaranteed. If the server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. Clients can use + // [Operations.GetOperation][google.longrunning.Operations.GetOperation] or + // other methods to check whether the cancellation succeeded or whether the + // operation completed despite cancellation. On successful cancellation, + // the operation is not deleted; instead, it becomes an operation with + // an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1, + // corresponding to `Code.CANCELLED`. + rpc CancelOperation(CancelOperationRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v1/{name=operations/**}:cancel" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Waits until the specified long-running operation is done or reaches at most + // a specified timeout, returning the latest state. If the operation is + // already done, the latest state is immediately returned. If the timeout + // specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + // timeout is used. If the server does not support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. + // Note that this method is on a best-effort basis. It may return the latest + // state before the specified timeout (including immediately), meaning even an + // immediate response is no guarantee that the operation is done. + rpc WaitOperation(WaitOperationRequest) returns (Operation) { + } +} + +// This resource represents a long-running operation that is the result of a +// network API call. 
+message Operation { + // The server-assigned name, which is only unique within the same service that + // originally returns it. If you use the default HTTP mapping, the + // `name` should be a resource name ending with `operations/{unique_id}`. + string name = 1; + + // Service-specific metadata associated with the operation. It typically + // contains progress information and common metadata such as create time. + // Some services might not provide such metadata. Any method that returns a + // long-running operation should document the metadata type, if any. + google.protobuf.Any metadata = 2; + + // If the value is `false`, it means the operation is still in progress. + // If `true`, the operation is completed, and either `error` or `response` is + // available. + bool done = 3; + + // The operation result, which can be either an `error` or a valid `response`. + // If `done` == `false`, neither `error` nor `response` is set. + // If `done` == `true`, exactly one of `error` or `response` is set. + oneof result { + // The error result of the operation in case of failure or cancellation. + google.rpc.Status error = 4; + + // The normal response of the operation in case of success. If the original + // method returns no data on success, such as `Delete`, the response is + // `google.protobuf.Empty`. If the original method is standard + // `Get`/`Create`/`Update`, the response should be the resource. For other + // methods, the response should have the type `XxxResponse`, where `Xxx` + // is the original method name. For example, if the original method name + // is `TakeSnapshot()`, the inferred response type is + // `TakeSnapshotResponse`. + google.protobuf.Any response = 5; + } +} + +// The request message for [Operations.GetOperation][google.longrunning.Operations.GetOperation]. +message GetOperationRequest { + // The name of the operation resource. 
+ string name = 1; +} + +// The request message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. +message ListOperationsRequest { + // The name of the operation's parent resource. + string name = 4; + + // The standard list filter. + string filter = 1; + + // The standard list page size. + int32 page_size = 2; + + // The standard list page token. + string page_token = 3; +} + +// The response message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. +message ListOperationsResponse { + // A list of operations that matches the specified filter in the request. + repeated Operation operations = 1; + + // The standard List next-page token. + string next_page_token = 2; +} + +// The request message for [Operations.CancelOperation][google.longrunning.Operations.CancelOperation]. +message CancelOperationRequest { + // The name of the operation resource to be cancelled. + string name = 1; +} + +// The request message for [Operations.DeleteOperation][google.longrunning.Operations.DeleteOperation]. +message DeleteOperationRequest { + // The name of the operation resource to be deleted. + string name = 1; +} + +// The request message for [Operations.WaitOperation][google.longrunning.Operations.WaitOperation]. +message WaitOperationRequest { + // The name of the operation resource to wait on. + string name = 1; + + // The maximum duration to wait before timing out. If left blank, the wait + // will be at most the time permitted by the underlying HTTP/RPC protocol. + // If RPC context deadline is also specified, the shorter one will be used. + google.protobuf.Duration timeout = 2; +} + +// A message representing the message types used by a long-running operation. 
+// +// Example: +// +// rpc LongRunningRecognize(LongRunningRecognizeRequest) +// returns (google.longrunning.Operation) { +// option (google.longrunning.operation_info) = { +// response_type: "LongRunningRecognizeResponse" +// metadata_type: "LongRunningRecognizeMetadata" +// }; +// } +message OperationInfo { + // Required. The message name of the primary return type for this + // long-running operation. + // This type will be used to deserialize the LRO's response. + // + // If the response is in a different package from the rpc, a fully-qualified + // message name must be used (e.g. `google.protobuf.Struct`). + // + // Note: Altering this value constitutes a breaking change. + string response_type = 1; + + // Required. The message name of the metadata type for this long-running + // operation. + // + // If the response is in a different package from the rpc, a fully-qualified + // message name must be used (e.g. `google.protobuf.Struct`). + // + // Note: Altering this value constitutes a breaking change. + string metadata_type = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/code.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/code.proto new file mode 100644 index 000000000000..7c810af40f08 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/code.proto @@ -0,0 +1,186 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc; + +option go_package = "google.golang.org/genproto/googleapis/rpc/code;code"; +option java_multiple_files = true; +option java_outer_classname = "CodeProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + +// The canonical error codes for gRPC APIs. +// +// +// Sometimes multiple error codes may apply. Services should return +// the most specific error code that applies. For example, prefer +// `OUT_OF_RANGE` over `FAILED_PRECONDITION` if both codes apply. +// Similarly prefer `NOT_FOUND` or `ALREADY_EXISTS` over `FAILED_PRECONDITION`. +enum Code { + // Not an error; returned on success. + // + // HTTP Mapping: 200 OK + OK = 0; + + // The operation was cancelled, typically by the caller. + // + // HTTP Mapping: 499 Client Closed Request + CANCELLED = 1; + + // Unknown error. For example, this error may be returned when + // a `Status` value received from another address space belongs to + // an error space that is not known in this address space. Also + // errors raised by APIs that do not return enough error information + // may be converted to this error. + // + // HTTP Mapping: 500 Internal Server Error + UNKNOWN = 2; + + // The client specified an invalid argument. Note that this differs + // from `FAILED_PRECONDITION`. `INVALID_ARGUMENT` indicates arguments + // that are problematic regardless of the state of the system + // (e.g., a malformed file name). + // + // HTTP Mapping: 400 Bad Request + INVALID_ARGUMENT = 3; + + // The deadline expired before the operation could complete. For operations + // that change the state of the system, this error may be returned + // even if the operation has completed successfully. For example, a + // successful response from a server could have been delayed long + // enough for the deadline to expire. 
+ // + // HTTP Mapping: 504 Gateway Timeout + DEADLINE_EXCEEDED = 4; + + // Some requested entity (e.g., file or directory) was not found. + // + // Note to server developers: if a request is denied for an entire class + // of users, such as gradual feature rollout or undocumented allowlist, + // `NOT_FOUND` may be used. If a request is denied for some users within + // a class of users, such as user-based access control, `PERMISSION_DENIED` + // must be used. + // + // HTTP Mapping: 404 Not Found + NOT_FOUND = 5; + + // The entity that a client attempted to create (e.g., file or directory) + // already exists. + // + // HTTP Mapping: 409 Conflict + ALREADY_EXISTS = 6; + + // The caller does not have permission to execute the specified + // operation. `PERMISSION_DENIED` must not be used for rejections + // caused by exhausting some resource (use `RESOURCE_EXHAUSTED` + // instead for those errors). `PERMISSION_DENIED` must not be + // used if the caller can not be identified (use `UNAUTHENTICATED` + // instead for those errors). This error code does not imply the + // request is valid or the requested entity exists or satisfies + // other pre-conditions. + // + // HTTP Mapping: 403 Forbidden + PERMISSION_DENIED = 7; + + // The request does not have valid authentication credentials for the + // operation. + // + // HTTP Mapping: 401 Unauthorized + UNAUTHENTICATED = 16; + + // Some resource has been exhausted, perhaps a per-user quota, or + // perhaps the entire file system is out of space. + // + // HTTP Mapping: 429 Too Many Requests + RESOURCE_EXHAUSTED = 8; + + // The operation was rejected because the system is not in a state + // required for the operation's execution. For example, the directory + // to be deleted is non-empty, an rmdir operation is applied to + // a non-directory, etc. 
+ // + // Service implementors can use the following guidelines to decide + // between `FAILED_PRECONDITION`, `ABORTED`, and `UNAVAILABLE`: + // (a) Use `UNAVAILABLE` if the client can retry just the failing call. + // (b) Use `ABORTED` if the client should retry at a higher level. For + // example, when a client-specified test-and-set fails, indicating the + // client should restart a read-modify-write sequence. + // (c) Use `FAILED_PRECONDITION` if the client should not retry until + // the system state has been explicitly fixed. For example, if an "rmdir" + // fails because the directory is non-empty, `FAILED_PRECONDITION` + // should be returned since the client should not retry unless + // the files are deleted from the directory. + // + // HTTP Mapping: 400 Bad Request + FAILED_PRECONDITION = 9; + + // The operation was aborted, typically due to a concurrency issue such as + // a sequencer check failure or transaction abort. + // + // See the guidelines above for deciding between `FAILED_PRECONDITION`, + // `ABORTED`, and `UNAVAILABLE`. + // + // HTTP Mapping: 409 Conflict + ABORTED = 10; + + // The operation was attempted past the valid range. E.g., seeking or + // reading past end-of-file. + // + // Unlike `INVALID_ARGUMENT`, this error indicates a problem that may + // be fixed if the system state changes. For example, a 32-bit file + // system will generate `INVALID_ARGUMENT` if asked to read at an + // offset that is not in the range [0,2^32-1], but it will generate + // `OUT_OF_RANGE` if asked to read from an offset past the current + // file size. + // + // There is a fair bit of overlap between `FAILED_PRECONDITION` and + // `OUT_OF_RANGE`. We recommend using `OUT_OF_RANGE` (the more specific + // error) when it applies so that callers who are iterating through + // a space can easily look for an `OUT_OF_RANGE` error to detect when + // they are done. 
+ // + // HTTP Mapping: 400 Bad Request + OUT_OF_RANGE = 11; + + // The operation is not implemented or is not supported/enabled in this + // service. + // + // HTTP Mapping: 501 Not Implemented + UNIMPLEMENTED = 12; + + // Internal errors. This means that some invariants expected by the + // underlying system have been broken. This error code is reserved + // for serious errors. + // + // HTTP Mapping: 500 Internal Server Error + INTERNAL = 13; + + // The service is currently unavailable. This is most likely a + // transient condition, which can be corrected by retrying with + // a backoff. Note that it is not always safe to retry + // non-idempotent operations. + // + // See the guidelines above for deciding between `FAILED_PRECONDITION`, + // `ABORTED`, and `UNAVAILABLE`. + // + // HTTP Mapping: 503 Service Unavailable + UNAVAILABLE = 14; + + // Unrecoverable data loss or corruption. + // + // HTTP Mapping: 500 Internal Server Error + DATA_LOSS = 15; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/context/attribute_context.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/context/attribute_context.proto new file mode 100644 index 000000000000..ef9242e40064 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/context/attribute_context.proto @@ -0,0 +1,344 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc.context; + +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/rpc/context/attribute_context;attribute_context"; +option java_multiple_files = true; +option java_outer_classname = "AttributeContextProto"; +option java_package = "com.google.rpc.context"; + +// This message defines the standard attribute vocabulary for Google APIs. +// +// An attribute is a piece of metadata that describes an activity on a network +// service. For example, the size of an HTTP request, or the status code of +// an HTTP response. +// +// Each attribute has a type and a name, which is logically defined as +// a proto message field in `AttributeContext`. The field type becomes the +// attribute type, and the field path becomes the attribute name. For example, +// the attribute `source.ip` maps to field `AttributeContext.source.ip`. +// +// This message definition is guaranteed not to have any wire breaking change. +// So you can use it directly for passing attributes across different systems. +// +// NOTE: Different system may generate different subset of attributes. Please +// verify the system specification before relying on an attribute generated +// a system. +message AttributeContext { + // This message defines attributes for a node that handles a network request. + // The node can be either a service or an application that sends, forwards, + // or receives the request. Service peers should fill in + // `principal` and `labels` as appropriate. + message Peer { + // The IP address of the peer. + string ip = 1; + + // The network port of the peer. + int64 port = 2; + + // The labels associated with the peer. 
+ map labels = 6; + + // The identity of this peer. Similar to `Request.auth.principal`, but + // relative to the peer instead of the request. For example, the + // identity associated with a load balancer that forwarded the request. + string principal = 7; + + // The CLDR country/region code associated with the above IP address. + // If the IP address is private, the `region_code` should reflect the + // physical location where this peer is running. + string region_code = 8; + } + + // This message defines attributes associated with API operations, such as + // a network API request. The terminology is based on the conventions used + // by Google APIs, Istio, and OpenAPI. + message Api { + // The API service name. It is a logical identifier for a networked API, + // such as "pubsub.googleapis.com". The naming syntax depends on the + // API management system being used for handling the request. + string service = 1; + + // The API operation name. For gRPC requests, it is the fully qualified API + // method name, such as "google.pubsub.v1.Publisher.Publish". For OpenAPI + // requests, it is the `operationId`, such as "getPet". + string operation = 2; + + // The API protocol used for sending the request, such as "http", "https", + // "grpc", or "internal". + string protocol = 3; + + // The API version associated with the API operation above, such as "v1" or + // "v1alpha1". + string version = 4; + } + + // This message defines request authentication attributes. Terminology is + // based on the JSON Web Token (JWT) standard, but the terms also + // correlate to concepts in other standards. + message Auth { + // The authenticated principal. Reflects the issuer (`iss`) and subject + // (`sub`) claims within a JWT. The issuer and subject should be `/` + // delimited, with `/` percent-encoded within the subject fragment. 
For + // Google accounts, the principal format is: + // "https://accounts.google.com/{id}" + string principal = 1; + + // The intended audience(s) for this authentication information. Reflects + // the audience (`aud`) claim within a JWT. The audience + // value(s) depends on the `issuer`, but typically include one or more of + // the following pieces of information: + // + // * The services intended to receive the credential. For example, + // ["https://pubsub.googleapis.com/", "https://storage.googleapis.com/"]. + // * A set of service-based scopes. For example, + // ["https://www.googleapis.com/auth/cloud-platform"]. + // * The client id of an app, such as the Firebase project id for JWTs + // from Firebase Auth. + // + // Consult the documentation for the credential issuer to determine the + // information provided. + repeated string audiences = 2; + + // The authorized presenter of the credential. Reflects the optional + // Authorized Presenter (`azp`) claim within a JWT or the + // OAuth client id. For example, a Google Cloud Platform client id looks + // as follows: "123456789012.apps.googleusercontent.com". + string presenter = 3; + + // Structured claims presented with the credential. JWTs include + // `{key: value}` pairs for standard and private claims. The following + // is a subset of the standard required and optional claims that would + // typically be presented for a Google-based JWT: + // + // {'iss': 'accounts.google.com', + // 'sub': '113289723416554971153', + // 'aud': ['123456789012', 'pubsub.googleapis.com'], + // 'azp': '123456789012.apps.googleusercontent.com', + // 'email': 'jsmith@example.com', + // 'iat': 1353601026, + // 'exp': 1353604926} + // + // SAML assertions are similarly specified, but with an identity provider + // dependent structure. + google.protobuf.Struct claims = 4; + + // A list of access level resource names that allow resources to be + // accessed by authenticated requester. 
It is part of Secure GCP processing + // for the incoming request. An access level string has the format: + // "//{api_service_name}/accessPolicies/{policy_id}/accessLevels/{short_name}" + // + // Example: + // "//accesscontextmanager.googleapis.com/accessPolicies/MY_POLICY_ID/accessLevels/MY_LEVEL" + repeated string access_levels = 5; + } + + // This message defines attributes for an HTTP request. If the actual + // request is not an HTTP request, the runtime system should try to map + // the actual request to an equivalent HTTP request. + message Request { + // The unique ID for a request, which can be propagated to downstream + // systems. The ID should have low probability of collision + // within a single day for a specific service. + string id = 1; + + // The HTTP request method, such as `GET`, `POST`. + string method = 2; + + // The HTTP request headers. If multiple headers share the same key, they + // must be merged according to the HTTP spec. All header keys must be + // lowercased, because HTTP header keys are case-insensitive. + map headers = 3; + + // The HTTP URL path, excluding the query parameters. + string path = 4; + + // The HTTP request `Host` header value. + string host = 5; + + // The HTTP URL scheme, such as `http` and `https`. + string scheme = 6; + + // The HTTP URL query in the format of `name1=value1&name2=value2`, as it + // appears in the first line of the HTTP request. No decoding is performed. + string query = 7; + + // The timestamp when the `destination` service receives the last byte of + // the request. + google.protobuf.Timestamp time = 9; + + // The HTTP request size in bytes. If unknown, it must be -1. + int64 size = 10; + + // The network protocol used with the request, such as "http/1.1", + // "spdy/3", "h2", "h2c", "webrtc", "tcp", "udp", "quic". See + // https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + // for details. 
+ string protocol = 11; + + // A special parameter for request reason. It is used by security systems + // to associate auditing information with a request. + string reason = 12; + + // The request authentication. May be absent for unauthenticated requests. + // Derived from the HTTP request `Authorization` header or equivalent. + Auth auth = 13; + } + + // This message defines attributes for a typical network response. It + // generally models semantics of an HTTP response. + message Response { + // The HTTP response status code, such as `200` and `404`. + int64 code = 1; + + // The HTTP response size in bytes. If unknown, it must be -1. + int64 size = 2; + + // The HTTP response headers. If multiple headers share the same key, they + // must be merged according to HTTP spec. All header keys must be + // lowercased, because HTTP header keys are case-insensitive. + map headers = 3; + + // The timestamp when the `destination` service sends the last byte of + // the response. + google.protobuf.Timestamp time = 4; + + // The amount of time it takes the backend service to fully respond to a + // request. Measured from when the destination service starts to send the + // request to the backend until when the destination service receives the + // complete response from the backend. + google.protobuf.Duration backend_latency = 5; + } + + // This message defines core attributes for a resource. A resource is an + // addressable (named) entity provided by the destination service. For + // example, a file stored on a network storage service. + message Resource { + // The name of the service that this resource belongs to, such as + // `pubsub.googleapis.com`. The service may be different from the DNS + // hostname that actually serves the request. + string service = 1; + + // The stable identifier (name) of a resource on the `service`. A resource + // can be logically identified as "//{resource.service}/{resource.name}". 
+ // The differences between a resource name and a URI are: + // + // * Resource name is a logical identifier, independent of network + // protocol and API version. For example, + // `//pubsub.googleapis.com/projects/123/topics/news-feed`. + // * URI often includes protocol and version information, so it can + // be used directly by applications. For example, + // `https://pubsub.googleapis.com/v1/projects/123/topics/news-feed`. + // + // See https://cloud.google.com/apis/design/resource_names for details. + string name = 2; + + // The type of the resource. The syntax is platform-specific because + // different platforms define their resources differently. + // + // For Google APIs, the type format must be "{service}/{kind}", such as + // "pubsub.googleapis.com/Topic". + string type = 3; + + // The labels or tags on the resource, such as AWS resource tags and + // Kubernetes resource labels. + map labels = 4; + + // The unique identifier of the resource. UID is unique in the time + // and space for this resource within the scope of the service. It is + // typically generated by the server on successful creation of a resource + // and must not be changed. UID is used to uniquely identify resources + // with resource name reuses. This should be a UUID4. + string uid = 5; + + // Annotations is an unstructured key-value map stored with a resource that + // may be set by external tools to store and retrieve arbitrary metadata. + // They are not queryable and should be preserved when modifying objects. + // + // More info: https://kubernetes.io/docs/user-guide/annotations + map annotations = 6; + + // Mutable. The display name set by clients. Must be <= 63 characters. + string display_name = 7; + + // Output only. The timestamp when the resource was created. This may + // be either the time creation was initiated or when it was completed. + google.protobuf.Timestamp create_time = 8; + + // Output only. The timestamp when the resource was last updated. 
Any + // change to the resource made by users must refresh this value. + // Changes to a resource made by the service should refresh this value. + google.protobuf.Timestamp update_time = 9; + + // Output only. The timestamp when the resource was deleted. + // If the resource is not deleted, this must be empty. + google.protobuf.Timestamp delete_time = 10; + + // Output only. An opaque value that uniquely identifies a version or + // generation of a resource. It can be used to confirm that the client + // and server agree on the ordering of a resource being written. + string etag = 11; + + // Immutable. The location of the resource. The location encoding is + // specific to the service provider, and new encoding may be introduced + // as the service evolves. + // + // For Google Cloud products, the encoding is what is used by Google Cloud + // APIs, such as `us-east1`, `aws-us-east-1`, and `azure-eastus2`. The + // semantics of `location` is identical to the + // `cloud.googleapis.com/location` label used by some Google Cloud APIs. + string location = 12; + } + + // The origin of a network activity. In a multi hop network activity, + // the origin represents the sender of the first hop. For the first hop, + // the `source` and the `origin` must have the same content. + Peer origin = 7; + + // The source of a network activity, such as starting a TCP connection. + // In a multi hop network activity, the source represents the sender of the + // last hop. + Peer source = 1; + + // The destination of a network activity, such as accepting a TCP connection. + // In a multi hop network activity, the destination represents the receiver of + // the last hop. + Peer destination = 2; + + // Represents a network request, such as an HTTP request. + Request request = 3; + + // Represents a network response, such as an HTTP response. + Response response = 4; + + // Represents a target resource that is involved with a network activity. 
+ // If multiple resources are involved with an activity, this must be the + // primary one. + Resource resource = 5; + + // Represents an API operation that is involved to a network activity. + Api api = 6; + + // Supports extensions for advanced use cases, such as logs and metrics. + repeated google.protobuf.Any extensions = 8; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/context/audit_context.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/context/audit_context.proto new file mode 100644 index 000000000000..7b8b7051f4c1 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/context/audit_context.proto @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc.context; + +import "google/protobuf/struct.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/rpc/context;context"; +option java_multiple_files = true; +option java_outer_classname = "AuditContextProto"; +option java_package = "com.google.rpc.context"; + +// `AuditContext` provides information that is needed for audit logging. +message AuditContext { + // Serialized audit log. + bytes audit_log = 1; + + // An API request message that is scrubbed based on the method annotation. 
+ // This field should only be filled if audit_log field is present. + // Service Control will use this to assemble a complete log for Cloud Audit + // Logs and Google internal audit logs. + google.protobuf.Struct scrubbed_request = 2; + + // An API response message that is scrubbed based on the method annotation. + // This field should only be filled if audit_log field is present. + // Service Control will use this to assemble a complete log for Cloud Audit + // Logs and Google internal audit logs. + google.protobuf.Struct scrubbed_response = 3; + + // Number of scrubbed response items. + int32 scrubbed_response_item_count = 4; + + // Audit resource name which is scrubbed. + string target_resource = 5; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/error_details.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/error_details.proto new file mode 100644 index 000000000000..c489e83142ab --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/error_details.proto @@ -0,0 +1,285 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.rpc; + +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/rpc/errdetails;errdetails"; +option java_multiple_files = true; +option java_outer_classname = "ErrorDetailsProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + +// Describes the cause of the error with structured details. +// +// Example of an error when contacting the "pubsub.googleapis.com" API when it +// is not enabled: +// +// { "reason": "API_DISABLED" +// "domain": "googleapis.com" +// "metadata": { +// "resource": "projects/123", +// "service": "pubsub.googleapis.com" +// } +// } +// +// This response indicates that the pubsub.googleapis.com API is not enabled. +// +// Example of an error that is returned when attempting to create a Spanner +// instance in a region that is out of stock: +// +// { "reason": "STOCKOUT" +// "domain": "spanner.googleapis.com", +// "metadata": { +// "availableRegions": "us-central1,us-east2" +// } +// } +message ErrorInfo { + // The reason of the error. This is a constant value that identifies the + // proximate cause of the error. Error reasons are unique within a particular + // domain of errors. This should be at most 63 characters and match a + // regular expression of `[A-Z][A-Z0-9_]+[A-Z0-9]`, which represents + // UPPER_SNAKE_CASE. + string reason = 1; + + // The logical grouping to which the "reason" belongs. The error domain + // is typically the registered service name of the tool or product that + // generates the error. Example: "pubsub.googleapis.com". If the error is + // generated by some common infrastructure, the error domain must be a + // globally unique value that identifies the infrastructure. For Google API + // infrastructure, the error domain is "googleapis.com". + string domain = 2; + + // Additional structured details about this error. 
+ // + // Keys should match /[a-zA-Z0-9-_]/ and be limited to 64 characters in + // length. When identifying the current value of an exceeded limit, the units + // should be contained in the key, not the value. For example, rather than + // {"instanceLimit": "100/request"}, should be returned as, + // {"instanceLimitPerRequest": "100"}, if the client exceeds the number of + // instances that can be created in a single (batch) request. + map metadata = 3; +} + +// Describes when the clients can retry a failed request. Clients could ignore +// the recommendation here or retry when this information is missing from error +// responses. +// +// It's always recommended that clients should use exponential backoff when +// retrying. +// +// Clients should wait until `retry_delay` amount of time has passed since +// receiving the error response before retrying. If retrying requests also +// fail, clients should use an exponential backoff scheme to gradually increase +// the delay between retries based on `retry_delay`, until either a maximum +// number of retries have been reached or a maximum retry delay cap has been +// reached. +message RetryInfo { + // Clients should wait at least this long between retrying the same request. + google.protobuf.Duration retry_delay = 1; +} + +// Describes additional debugging info. +message DebugInfo { + // The stack trace entries indicating where the error occurred. + repeated string stack_entries = 1; + + // Additional debugging information provided by the server. + string detail = 2; +} + +// Describes how a quota check failed. +// +// For example if a daily limit was exceeded for the calling project, +// a service could respond with a QuotaFailure detail containing the project +// id and the description of the quota limit that was exceeded. If the +// calling project hasn't enabled the service in the developer console, then +// a service could respond with the project id and set `service_disabled` +// to true. 
+// +// Also see RetryInfo and Help types for other details about handling a +// quota failure. +message QuotaFailure { + // A message type used to describe a single quota violation. For example, a + // daily quota or a custom quota that was exceeded. + message Violation { + // The subject on which the quota check failed. + // For example, "clientip:" or "project:". + string subject = 1; + + // A description of how the quota check failed. Clients can use this + // description to find more about the quota configuration in the service's + // public documentation, or find the relevant quota limit to adjust through + // developer console. + // + // For example: "Service disabled" or "Daily Limit for read operations + // exceeded". + string description = 2; + } + + // Describes all quota violations. + repeated Violation violations = 1; +} + +// Describes what preconditions have failed. +// +// For example, if an RPC failed because it required the Terms of Service to be +// acknowledged, it could list the terms of service violation in the +// PreconditionFailure message. +message PreconditionFailure { + // A message type used to describe a single precondition failure. + message Violation { + // The type of PreconditionFailure. We recommend using a service-specific + // enum type to define the supported precondition violation subjects. For + // example, "TOS" for "Terms of Service violation". + string type = 1; + + // The subject, relative to the type, that failed. + // For example, "google.com/cloud" relative to the "TOS" type would indicate + // which terms of service is being referenced. + string subject = 2; + + // A description of how the precondition failed. Developers can use this + // description to understand how to fix the failure. + // + // For example: "Terms of service not accepted". + string description = 3; + } + + // Describes all precondition violations. + repeated Violation violations = 1; +} + +// Describes violations in a client request. 
This error type focuses on the +// syntactic aspects of the request. +message BadRequest { + // A message type used to describe a single bad request field. + message FieldViolation { + // A path that leads to a field in the request body. The value will be a + // sequence of dot-separated identifiers that identify a protocol buffer + // field. + // + // Consider the following: + // + // message CreateContactRequest { + // message EmailAddress { + // enum Type { + // TYPE_UNSPECIFIED = 0; + // HOME = 1; + // WORK = 2; + // } + // + // optional string email = 1; + // repeated EmailType type = 2; + // } + // + // string full_name = 1; + // repeated EmailAddress email_addresses = 2; + // } + // + // In this example, in proto `field` could take one of the following values: + // + // * `full_name` for a violation in the `full_name` value + // * `email_addresses[1].email` for a violation in the `email` field of the + // first `email_addresses` message + // * `email_addresses[3].type[2]` for a violation in the second `type` + // value in the third `email_addresses` message. + // + // In JSON, the same values are represented as: + // + // * `fullName` for a violation in the `fullName` value + // * `emailAddresses[1].email` for a violation in the `email` field of the + // first `emailAddresses` message + // * `emailAddresses[3].type[2]` for a violation in the second `type` + // value in the third `emailAddresses` message. + string field = 1; + + // A description of why the request element is bad. + string description = 2; + } + + // Describes all violations in a client request. + repeated FieldViolation field_violations = 1; +} + +// Contains metadata about the request that clients can attach when filing a bug +// or providing other forms of feedback. +message RequestInfo { + // An opaque string that should only be interpreted by the service generating + // it. For example, it can be used to identify requests in the service's logs. 
+ string request_id = 1; + + // Any data that was used to serve this request. For example, an encrypted + // stack trace that can be sent back to the service provider for debugging. + string serving_data = 2; +} + +// Describes the resource that is being accessed. +message ResourceInfo { + // A name for the type of resource being accessed, e.g. "sql table", + // "cloud storage bucket", "file", "Google calendar"; or the type URL + // of the resource: e.g. "type.googleapis.com/google.pubsub.v1.Topic". + string resource_type = 1; + + // The name of the resource being accessed. For example, a shared calendar + // name: "example.com_4fghdhgsrgh@group.calendar.google.com", if the current + // error is + // [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED]. + string resource_name = 2; + + // The owner of the resource (optional). + // For example, "user:" or "project:". + string owner = 3; + + // Describes what error is encountered when accessing this resource. + // For example, updating a cloud project may require the `writer` permission + // on the developer console project. + string description = 4; +} + +// Provides links to documentation or for performing an out of band action. +// +// For example, if a quota check failed with an error indicating the calling +// project hasn't enabled the accessed service, this can contain a URL pointing +// directly to the right place in the developer console to flip the bit. +message Help { + // Describes a URL link. + message Link { + // Describes what the link offers. + string description = 1; + + // The URL of the link. + string url = 2; + } + + // URL(s) pointing to additional information on handling the current error. + repeated Link links = 1; +} + +// Provides a localized error message that is safe to return to the user +// which can be attached to an RPC error. +message LocalizedMessage { + // The locale used following the specification defined at + // https://www.rfc-editor.org/rfc/bcp/bcp47.txt. 
+ // Examples are: "en-US", "fr-CH", "es-MX" + string locale = 1; + + // The localized error message in the above locale. + string message = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/status.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/status.proto new file mode 100644 index 000000000000..923e169381a7 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/rpc/status.proto @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.rpc; + +import "google/protobuf/any.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/rpc/status;status"; +option java_multiple_files = true; +option java_outer_classname = "StatusProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + +// The `Status` type defines a logical error model that is suitable for +// different programming environments, including REST APIs and RPC APIs. It is +// used by [gRPC](https://github.com/grpc). Each `Status` message contains +// three pieces of data: error code, error message, and error details. +// +// You can find out more about this error model and how to work with it in the +// [API Design Guide](https://cloud.google.com/apis/design/errors). 
+message Status { + // The status code, which should be an enum value of + // [google.rpc.Code][google.rpc.Code]. + int32 code = 1; + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized + // by the client. + string message = 2; + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + repeated google.protobuf.Any details = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/shopping/type/types.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/shopping/type/types.proto new file mode 100644 index 000000000000..f2e842724a8a --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/shopping/type/types.proto @@ -0,0 +1,161 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.shopping.type; + +option csharp_namespace = "Google.Shopping.Type"; +option go_package = "cloud.google.com/go/shopping/type/typepb;typepb"; +option java_multiple_files = true; +option java_outer_classname = "TypesProto"; +option java_package = "com.google.shopping.type"; + +// The price represented as a number and currency. 
+message Price { + // The price represented as a number in micros (1 million micros is an + // equivalent to one's currency standard unit, for example, 1 USD = 1000000 + // micros). + // This field can also be set as infinity by setting to -1. + // This field only support -1 and positive value. + optional int64 amount_micros = 1; + + // The currency of the price using three-letter acronyms according to [ISO + // 4217](http://en.wikipedia.org/wiki/ISO_4217). + optional string currency_code = 2; +} + +// A message that represents custom attributes. Exactly one of `value` or +// `group_values` must not be empty. +message CustomAttribute { + // The name of the attribute. + optional string name = 1; + + // The value of the attribute. If `value` is not empty, `group_values` must be + // empty. + optional string value = 2; + + // Subattributes within this attribute group. If + // `group_values` is not empty, `value` must be empty. + repeated CustomAttribute group_values = 3; +} + +// Destinations available for a product. +// +// Destinations are used in Merchant Center to allow you to control where the +// products from your data feed should be displayed. +// +message Destination { + // Destination values. + enum DestinationEnum { + // Not specified. + DESTINATION_ENUM_UNSPECIFIED = 0; + + // [Shopping ads](https://support.google.com/google-ads/answer/2454022). + SHOPPING_ADS = 1; + + // [Display ads](https://support.google.com/merchants/answer/6069387). + DISPLAY_ADS = 2; + + // [Local inventory + // ads](https://support.google.com/merchants/answer/3057972). + LOCAL_INVENTORY_ADS = 3; + + // [Free listings](https://support.google.com/merchants/answer/9199328). + FREE_LISTINGS = 4; + + // [Free local product + // listings](https://support.google.com/merchants/answer/9825611). + FREE_LOCAL_LISTINGS = 5; + + // [YouTube Shopping](https://support.google.com/merchants/answer/12362804). 
+ YOUTUBE_SHOPPING = 6; + } +} + +// Reporting contexts that your account and product issues apply to. +// +// Reporting contexts are groups of surfaces and formats for product results on +// Google. They can represent the entire destination (for example, [Shopping +// ads](https://support.google.com/merchants/answer/6149970)) or a subset of +// formats within a destination (for example, [Discovery +// ads](https://support.google.com/merchants/answer/13389785)). +// +message ReportingContext { + // Reporting context values. + enum ReportingContextEnum { + // Not specified. + REPORTING_CONTEXT_ENUM_UNSPECIFIED = 0; + + // [Shopping ads](https://support.google.com/merchants/answer/6149970). + SHOPPING_ADS = 1; + + // [Discovery and Demand Gen + // ads](https://support.google.com/merchants/answer/13389785). + DISCOVERY_ADS = 2; + + // [Video ads](https://support.google.com/google-ads/answer/6340491). + VIDEO_ADS = 3; + + // [Display ads](https://support.google.com/merchants/answer/6069387). + DISPLAY_ADS = 4; + + // [Local inventory + // ads](https://support.google.com/merchants/answer/3271956). + LOCAL_INVENTORY_ADS = 5; + + // [Vehicle inventory + // ads](https://support.google.com/merchants/answer/11544533). + VEHICLE_INVENTORY_ADS = 6; + + // [Free product + // listings](https://support.google.com/merchants/answer/9199328). + FREE_LISTINGS = 7; + + // [Free local product + // listings](https://support.google.com/merchants/answer/9825611). + FREE_LOCAL_LISTINGS = 8; + + // [Free local vehicle + // listings](https://support.google.com/merchants/answer/11544533). + FREE_LOCAL_VEHICLE_LISTINGS = 9; + + // [YouTube + // Shopping](https://support.google.com/merchants/answer/13478370). + YOUTUBE_SHOPPING = 10; + + // [Cloud retail](https://cloud.google.com/solutions/retail). + CLOUD_RETAIL = 11; + + // [Local cloud retail](https://cloud.google.com/solutions/retail). 
+ LOCAL_CLOUD_RETAIL = 12; + } +} + +// [Channel](https://support.google.com/merchants/answer/7361332) of a product. +// +// Channel is used to distinguish between online and local products. +message Channel { + // Channel values. + enum ChannelEnum { + // Not specified. + CHANNEL_ENUM_UNSPECIFIED = 0; + + // Online product. + ONLINE = 1; + + // Local product. + LOCAL = 2; + } +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/calendar_period.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/calendar_period.proto new file mode 100644 index 000000000000..82f5690b752b --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/calendar_period.proto @@ -0,0 +1,56 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option go_package = "google.golang.org/genproto/googleapis/type/calendarperiod;calendarperiod"; +option java_multiple_files = true; +option java_outer_classname = "CalendarPeriodProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// A `CalendarPeriod` represents the abstract concept of a time period that has +// a canonical start. Grammatically, "the start of the current +// `CalendarPeriod`." All calendar times begin at midnight UTC. 
+enum CalendarPeriod { + // Undefined period, raises an error. + CALENDAR_PERIOD_UNSPECIFIED = 0; + + // A day. + DAY = 1; + + // A week. Weeks begin on Monday, following + // [ISO 8601](https://en.wikipedia.org/wiki/ISO_week_date). + WEEK = 2; + + // A fortnight. The first calendar fortnight of the year begins at the start + // of week 1 according to + // [ISO 8601](https://en.wikipedia.org/wiki/ISO_week_date). + FORTNIGHT = 3; + + // A month. + MONTH = 4; + + // A quarter. Quarters start on dates 1-Jan, 1-Apr, 1-Jul, and 1-Oct of each + // year. + QUARTER = 5; + + // A half-year. Half-years start on dates 1-Jan and 1-Jul. + HALF = 6; + + // A year. + YEAR = 7; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/color.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/color.proto new file mode 100644 index 000000000000..5dc85a6a3856 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/color.proto @@ -0,0 +1,174 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.type; + +import "google/protobuf/wrappers.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/color;color"; +option java_multiple_files = true; +option java_outer_classname = "ColorProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a color in the RGBA color space. This representation is designed +// for simplicity of conversion to/from color representations in various +// languages over compactness. For example, the fields of this representation +// can be trivially provided to the constructor of `java.awt.Color` in Java; it +// can also be trivially provided to UIColor's `+colorWithRed:green:blue:alpha` +// method in iOS; and, with just a little work, it can be easily formatted into +// a CSS `rgba()` string in JavaScript. +// +// This reference page doesn't carry information about the absolute color +// space +// that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, +// DCI-P3, BT.2020, etc.). By default, applications should assume the sRGB color +// space. +// +// When color equality needs to be decided, implementations, unless +// documented otherwise, treat two colors as equal if all their red, +// green, blue, and alpha values each differ by at most 1e-5. +// +// Example (Java): +// +// import com.google.type.Color; +// +// // ... +// public static java.awt.Color fromProto(Color protocolor) { +// float alpha = protocolor.hasAlpha() +// ? 
protocolor.getAlpha().getValue() +// : 1.0; +// +// return new java.awt.Color( +// protocolor.getRed(), +// protocolor.getGreen(), +// protocolor.getBlue(), +// alpha); +// } +// +// public static Color toProto(java.awt.Color color) { +// float red = (float) color.getRed(); +// float green = (float) color.getGreen(); +// float blue = (float) color.getBlue(); +// float denominator = 255.0; +// Color.Builder resultBuilder = +// Color +// .newBuilder() +// .setRed(red / denominator) +// .setGreen(green / denominator) +// .setBlue(blue / denominator); +// int alpha = color.getAlpha(); +// if (alpha != 255) { +// result.setAlpha( +// FloatValue +// .newBuilder() +// .setValue(((float) alpha) / denominator) +// .build()); +// } +// return resultBuilder.build(); +// } +// // ... +// +// Example (iOS / Obj-C): +// +// // ... +// static UIColor* fromProto(Color* protocolor) { +// float red = [protocolor red]; +// float green = [protocolor green]; +// float blue = [protocolor blue]; +// FloatValue* alpha_wrapper = [protocolor alpha]; +// float alpha = 1.0; +// if (alpha_wrapper != nil) { +// alpha = [alpha_wrapper value]; +// } +// return [UIColor colorWithRed:red green:green blue:blue alpha:alpha]; +// } +// +// static Color* toProto(UIColor* color) { +// CGFloat red, green, blue, alpha; +// if (![color getRed:&red green:&green blue:&blue alpha:&alpha]) { +// return nil; +// } +// Color* result = [[Color alloc] init]; +// [result setRed:red]; +// [result setGreen:green]; +// [result setBlue:blue]; +// if (alpha <= 0.9999) { +// [result setAlpha:floatWrapperWithValue(alpha)]; +// } +// [result autorelease]; +// return result; +// } +// // ... +// +// Example (JavaScript): +// +// // ... 
+// +// var protoToCssColor = function(rgb_color) { +// var redFrac = rgb_color.red || 0.0; +// var greenFrac = rgb_color.green || 0.0; +// var blueFrac = rgb_color.blue || 0.0; +// var red = Math.floor(redFrac * 255); +// var green = Math.floor(greenFrac * 255); +// var blue = Math.floor(blueFrac * 255); +// +// if (!('alpha' in rgb_color)) { +// return rgbToCssColor(red, green, blue); +// } +// +// var alphaFrac = rgb_color.alpha.value || 0.0; +// var rgbParams = [red, green, blue].join(','); +// return ['rgba(', rgbParams, ',', alphaFrac, ')'].join(''); +// }; +// +// var rgbToCssColor = function(red, green, blue) { +// var rgbNumber = new Number((red << 16) | (green << 8) | blue); +// var hexString = rgbNumber.toString(16); +// var missingZeros = 6 - hexString.length; +// var resultBuilder = ['#']; +// for (var i = 0; i < missingZeros; i++) { +// resultBuilder.push('0'); +// } +// resultBuilder.push(hexString); +// return resultBuilder.join(''); +// }; +// +// // ... +message Color { + // The amount of red in the color as a value in the interval [0, 1]. + float red = 1; + + // The amount of green in the color as a value in the interval [0, 1]. + float green = 2; + + // The amount of blue in the color as a value in the interval [0, 1]. + float blue = 3; + + // The fraction of this color that should be applied to the pixel. That is, + // the final pixel color is defined by the equation: + // + // `pixel color = alpha * (this color) + (1.0 - alpha) * (background color)` + // + // This means that a value of 1.0 corresponds to a solid color, whereas + // a value of 0.0 corresponds to a completely transparent color. This + // uses a wrapper message rather than a simple float scalar so that it is + // possible to distinguish between a default value and the value being unset. + // If omitted, this color object is rendered as a solid color + // (as if the alpha value had been explicitly given a value of 1.0). 
+ google.protobuf.FloatValue alpha = 4; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/date.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/date.proto new file mode 100644 index 000000000000..e4e730e6f5a9 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/date.proto @@ -0,0 +1,52 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/date;date"; +option java_multiple_files = true; +option java_outer_classname = "DateProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a whole or partial calendar date, such as a birthday. The time of +// day and time zone are either specified elsewhere or are insignificant. The +// date is relative to the Gregorian Calendar. 
This can represent one of the +// following: +// +// * A full date, with non-zero year, month, and day values +// * A month and day value, with a zero year, such as an anniversary +// * A year on its own, with zero month and day values +// * A year and month value, with a zero day, such as a credit card expiration +// date +// +// Related types are [google.type.TimeOfDay][google.type.TimeOfDay] and +// `google.protobuf.Timestamp`. +message Date { + // Year of the date. Must be from 1 to 9999, or 0 to specify a date without + // a year. + int32 year = 1; + + // Month of a year. Must be from 1 to 12, or 0 to specify a year without a + // month and day. + int32 month = 2; + + // Day of a month. Must be from 1 to 31 and valid for the year and month, or 0 + // to specify a year by itself or a year and month where the day isn't + // significant. + int32 day = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/datetime.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/datetime.proto new file mode 100644 index 000000000000..cfed85d70a10 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/datetime.proto @@ -0,0 +1,104 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.type; + +import "google/protobuf/duration.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/datetime;datetime"; +option java_multiple_files = true; +option java_outer_classname = "DateTimeProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents civil time (or occasionally physical time). +// +// This type can represent a civil time in one of a few possible ways: +// +// * When utc_offset is set and time_zone is unset: a civil time on a calendar +// day with a particular offset from UTC. +// * When time_zone is set and utc_offset is unset: a civil time on a calendar +// day in a particular time zone. +// * When neither time_zone nor utc_offset is set: a civil time on a calendar +// day in local time. +// +// The date is relative to the Proleptic Gregorian Calendar. +// +// If year is 0, the DateTime is considered not to have a specific year. month +// and day must have valid, non-zero values. +// +// This type may also be used to represent a physical time if all the date and +// time fields are set and either case of the `time_offset` oneof is set. +// Consider using `Timestamp` message for physical time instead. If your use +// case also would like to store the user's timezone, that can be done in +// another field. +// +// This type is more flexible than some applications may want. Make sure to +// document and validate your application's limitations. +message DateTime { + // Optional. Year of date. Must be from 1 to 9999, or 0 if specifying a + // datetime without a year. + int32 year = 1; + + // Required. Month of year. Must be from 1 to 12. + int32 month = 2; + + // Required. Day of month. Must be from 1 to 31 and valid for the year and + // month. + int32 day = 3; + + // Required. Hours of day in 24 hour format. Should be from 0 to 23. 
An API + // may choose to allow the value "24:00:00" for scenarios like business + // closing time. + int32 hours = 4; + + // Required. Minutes of hour of day. Must be from 0 to 59. + int32 minutes = 5; + + // Required. Seconds of minutes of the time. Must normally be from 0 to 59. An + // API may allow the value 60 if it allows leap-seconds. + int32 seconds = 6; + + // Required. Fractions of seconds in nanoseconds. Must be from 0 to + // 999,999,999. + int32 nanos = 7; + + // Optional. Specifies either the UTC offset or the time zone of the DateTime. + // Choose carefully between them, considering that time zone data may change + // in the future (for example, a country modifies their DST start/end dates, + // and future DateTimes in the affected range had already been stored). + // If omitted, the DateTime is considered to be in local time. + oneof time_offset { + // UTC offset. Must be whole seconds, between -18 hours and +18 hours. + // For example, a UTC offset of -4:00 would be represented as + // { seconds: -14400 }. + google.protobuf.Duration utc_offset = 8; + + // Time zone. + TimeZone time_zone = 9; + } +} + +// Represents a time zone from the +// [IANA Time Zone Database](https://www.iana.org/time-zones). +message TimeZone { + // IANA Time Zone Database time zone, e.g. "America/New_York". + string id = 1; + + // Optional. IANA Time Zone Database version number, e.g. "2019a". 
+ string version = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/dayofweek.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/dayofweek.proto new file mode 100644 index 000000000000..4c80c62ec0b4 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/dayofweek.proto @@ -0,0 +1,50 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option go_package = "google.golang.org/genproto/googleapis/type/dayofweek;dayofweek"; +option java_multiple_files = true; +option java_outer_classname = "DayOfWeekProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a day of the week. +enum DayOfWeek { + // The day of the week is unspecified. 
+ DAY_OF_WEEK_UNSPECIFIED = 0; + + // Monday + MONDAY = 1; + + // Tuesday + TUESDAY = 2; + + // Wednesday + WEDNESDAY = 3; + + // Thursday + THURSDAY = 4; + + // Friday + FRIDAY = 5; + + // Saturday + SATURDAY = 6; + + // Sunday + SUNDAY = 7; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/decimal.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/decimal.proto new file mode 100644 index 000000000000..beb18a5d8dd2 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/decimal.proto @@ -0,0 +1,95 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/decimal;decimal"; +option java_multiple_files = true; +option java_outer_classname = "DecimalProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// A representation of a decimal value, such as 2.5. Clients may convert values +// into language-native decimal formats, such as Java's [BigDecimal][] or +// Python's [decimal.Decimal][]. 
+// +// [BigDecimal]: +// https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/math/BigDecimal.html +// [decimal.Decimal]: https://docs.python.org/3/library/decimal.html +message Decimal { + // The decimal value, as a string. + // + // The string representation consists of an optional sign, `+` (`U+002B`) + // or `-` (`U+002D`), followed by a sequence of zero or more decimal digits + // ("the integer"), optionally followed by a fraction, optionally followed + // by an exponent. + // + // The fraction consists of a decimal point followed by zero or more decimal + // digits. The string must contain at least one digit in either the integer + // or the fraction. The number formed by the sign, the integer and the + // fraction is referred to as the significand. + // + // The exponent consists of the character `e` (`U+0065`) or `E` (`U+0045`) + // followed by one or more decimal digits. + // + // Services **should** normalize decimal values before storing them by: + // + // - Removing an explicitly-provided `+` sign (`+2.5` -> `2.5`). + // - Replacing a zero-length integer value with `0` (`.5` -> `0.5`). + // - Coercing the exponent character to lower-case (`2.5E8` -> `2.5e8`). + // - Removing an explicitly-provided zero exponent (`2.5e0` -> `2.5`). + // + // Services **may** perform additional normalization based on its own needs + // and the internal decimal implementation selected, such as shifting the + // decimal point and exponent value together (example: `2.5e-1` <-> `0.25`). + // Additionally, services **may** preserve trailing zeroes in the fraction + // to indicate increased precision, but are not required to do so. + // + // Note that only the `.` character is supported to divide the integer + // and the fraction; `,` **should not** be supported regardless of locale. + // Additionally, thousand separators **should not** be supported. If a + // service does support them, values **must** be normalized. 
+ // + // The ENBF grammar is: + // + // DecimalString = + // [Sign] Significand [Exponent]; + // + // Sign = '+' | '-'; + // + // Significand = + // Digits ['.'] [Digits] | [Digits] '.' Digits; + // + // Exponent = ('e' | 'E') [Sign] Digits; + // + // Digits = { '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' }; + // + // Services **should** clearly document the range of supported values, the + // maximum supported precision (total number of digits), and, if applicable, + // the scale (number of digits after the decimal point), as well as how it + // behaves when receiving out-of-bounds values. + // + // Services **may** choose to accept values passed as input even when the + // value has a higher precision or scale than the service supports, and + // **should** round the value to fit the supported scale. Alternatively, the + // service **may** error with `400 Bad Request` (`INVALID_ARGUMENT` in gRPC) + // if precision would be lost. + // + // Services **should** error with `400 Bad Request` (`INVALID_ARGUMENT` in + // gRPC) if the service receives a value outside of the supported range. + string value = 1; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/expr.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/expr.proto new file mode 100644 index 000000000000..af0778cf958c --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/expr.proto @@ -0,0 +1,73 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option go_package = "google.golang.org/genproto/googleapis/type/expr;expr"; +option java_multiple_files = true; +option java_outer_classname = "ExprProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a textual expression in the Common Expression Language (CEL) +// syntax. CEL is a C-like expression language. The syntax and semantics of CEL +// are documented at https://github.com/google/cel-spec. +// +// Example (Comparison): +// +// title: "Summary size limit" +// description: "Determines if a summary is less than 100 chars" +// expression: "document.summary.size() < 100" +// +// Example (Equality): +// +// title: "Requestor is owner" +// description: "Determines if requestor is the document owner" +// expression: "document.owner == request.auth.claims.email" +// +// Example (Logic): +// +// title: "Public documents" +// description: "Determine whether the document should be publicly visible" +// expression: "document.type != 'private' && document.type != 'internal'" +// +// Example (Data Manipulation): +// +// title: "Notification string" +// description: "Create a notification string with a timestamp." +// expression: "'New message received at ' + string(document.create_time)" +// +// The exact variables and functions that may be referenced within an expression +// are determined by the service that evaluates it. See the service +// documentation for additional information. 
+message Expr { + // Textual representation of an expression in Common Expression Language + // syntax. + string expression = 1; + + // Optional. Title for the expression, i.e. a short string describing + // its purpose. This can be used e.g. in UIs which allow to enter the + // expression. + string title = 2; + + // Optional. Description of the expression. This is a longer text which + // describes the expression, e.g. when hovered over it in a UI. + string description = 3; + + // Optional. String indicating the location of the expression for error + // reporting, e.g. a file name and a position in the file. + string location = 4; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/fraction.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/fraction.proto new file mode 100644 index 000000000000..6c5ae6e2a25d --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/fraction.proto @@ -0,0 +1,33 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.type; + +option go_package = "google.golang.org/genproto/googleapis/type/fraction;fraction"; +option java_multiple_files = true; +option java_outer_classname = "FractionProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a fraction in terms of a numerator divided by a denominator. +message Fraction { + // The numerator in the fraction, e.g. 2 in 2/3. + int64 numerator = 1; + + // The value by which the numerator is divided, e.g. 3 in 2/3. Must be + // positive. + int64 denominator = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/interval.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/interval.proto new file mode 100644 index 000000000000..9702324cd4e8 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/interval.proto @@ -0,0 +1,46 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.type; + +import "google/protobuf/timestamp.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/interval;interval"; +option java_multiple_files = true; +option java_outer_classname = "IntervalProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a time interval, encoded as a Timestamp start (inclusive) and a +// Timestamp end (exclusive). +// +// The start must be less than or equal to the end. +// When the start equals the end, the interval is empty (matches no time). +// When both start and end are unspecified, the interval matches any time. +message Interval { + // Optional. Inclusive start of the interval. + // + // If specified, a Timestamp matching this interval will have to be the same + // or after the start. + google.protobuf.Timestamp start_time = 1; + + // Optional. Exclusive end of the interval. + // + // If specified, a Timestamp matching this interval will have to be before the + // end. + google.protobuf.Timestamp end_time = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/latlng.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/latlng.proto new file mode 100644 index 000000000000..9231456e328f --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/latlng.proto @@ -0,0 +1,37 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/latlng;latlng"; +option java_multiple_files = true; +option java_outer_classname = "LatLngProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// An object that represents a latitude/longitude pair. This is expressed as a +// pair of doubles to represent degrees latitude and degrees longitude. Unless +// specified otherwise, this must conform to the +// WGS84 +// standard. Values must be within normalized ranges. +message LatLng { + // The latitude in degrees. It must be in the range [-90.0, +90.0]. + double latitude = 1; + + // The longitude in degrees. It must be in the range [-180.0, +180.0]. + double longitude = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/localized_text.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/localized_text.proto new file mode 100644 index 000000000000..5c6922b8c0cd --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/localized_text.proto @@ -0,0 +1,36 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/localized_text;localized_text"; +option java_multiple_files = true; +option java_outer_classname = "LocalizedTextProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Localized variant of a text in a particular language. +message LocalizedText { + // Localized string in the language corresponding to `language_code' below. + string text = 1; + + // The text's BCP-47 language code, such as "en-US" or "sr-Latn". + // + // For more information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + string language_code = 2; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/money.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/money.proto new file mode 100644 index 000000000000..98d6494e4210 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/money.proto @@ -0,0 +1,42 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/money;money"; +option java_multiple_files = true; +option java_outer_classname = "MoneyProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents an amount of money with its currency type. +message Money { + // The three-letter currency code defined in ISO 4217. + string currency_code = 1; + + // The whole units of the amount. + // For example if `currencyCode` is `"USD"`, then 1 unit is one US dollar. + int64 units = 2; + + // Number of nano (10^-9) units of the amount. + // The value must be between -999,999,999 and +999,999,999 inclusive. + // If `units` is positive, `nanos` must be positive or zero. + // If `units` is zero, `nanos` can be positive, zero, or negative. + // If `units` is negative, `nanos` must be negative or zero. + // For example $-1.75 is represented as `units`=-1 and `nanos`=-750,000,000. 
+ int32 nanos = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/month.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/month.proto new file mode 100644 index 000000000000..99e7551b1416 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/month.proto @@ -0,0 +1,65 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option go_package = "google.golang.org/genproto/googleapis/type/month;month"; +option java_multiple_files = true; +option java_outer_classname = "MonthProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a month in the Gregorian calendar. +enum Month { + // The unspecified month. + MONTH_UNSPECIFIED = 0; + + // The month of January. + JANUARY = 1; + + // The month of February. + FEBRUARY = 2; + + // The month of March. + MARCH = 3; + + // The month of April. + APRIL = 4; + + // The month of May. + MAY = 5; + + // The month of June. + JUNE = 6; + + // The month of July. + JULY = 7; + + // The month of August. + AUGUST = 8; + + // The month of September. + SEPTEMBER = 9; + + // The month of October. + OCTOBER = 10; + + // The month of November. + NOVEMBER = 11; + + // The month of December. 
+ DECEMBER = 12; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/phone_number.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/phone_number.proto new file mode 100644 index 000000000000..7bbb7d873229 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/phone_number.proto @@ -0,0 +1,113 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/phone_number;phone_number"; +option java_multiple_files = true; +option java_outer_classname = "PhoneNumberProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// An object representing a phone number, suitable as an API wire format. +// +// This representation: +// +// - should not be used for locale-specific formatting of a phone number, such +// as "+1 (650) 253-0000 ext. 123" +// +// - is not designed for efficient storage +// - may not be suitable for dialing - specialized libraries (see references) +// should be used to parse the number for that purpose +// +// To do something meaningful with this number, such as format it for various +// use-cases, convert it to an `i18n.phonenumbers.PhoneNumber` object first. 
+// +// For instance, in Java this would be: +// +// com.google.type.PhoneNumber wireProto = +// com.google.type.PhoneNumber.newBuilder().build(); +// com.google.i18n.phonenumbers.Phonenumber.PhoneNumber phoneNumber = +// PhoneNumberUtil.getInstance().parse(wireProto.getE164Number(), "ZZ"); +// if (!wireProto.getExtension().isEmpty()) { +// phoneNumber.setExtension(wireProto.getExtension()); +// } +// +// Reference(s): +// - https://github.com/google/libphonenumber +message PhoneNumber { + // An object representing a short code, which is a phone number that is + // typically much shorter than regular phone numbers and can be used to + // address messages in MMS and SMS systems, as well as for abbreviated dialing + // (e.g. "Text 611 to see how many minutes you have remaining on your plan."). + // + // Short codes are restricted to a region and are not internationally + // dialable, which means the same short code can exist in different regions, + // with different usage and pricing, even if those regions share the same + // country calling code (e.g. US and CA). + message ShortCode { + // Required. The BCP-47 region code of the location where calls to this + // short code can be made, such as "US" and "BB". + // + // Reference(s): + // - http://www.unicode.org/reports/tr35/#unicode_region_subtag + string region_code = 1; + + // Required. The short code digits, without a leading plus ('+') or country + // calling code, e.g. "611". + string number = 2; + } + + // Required. Either a regular number, or a short code. New fields may be + // added to the oneof below in the future, so clients should ignore phone + // numbers for which none of the fields they coded against are set. 
+ oneof kind { + // The phone number, represented as a leading plus sign ('+'), followed by a + // phone number that uses a relaxed ITU E.164 format consisting of the + // country calling code (1 to 3 digits) and the subscriber number, with no + // additional spaces or formatting, e.g.: + // - correct: "+15552220123" + // - incorrect: "+1 (555) 222-01234 x123". + // + // The ITU E.164 format limits the latter to 12 digits, but in practice not + // all countries respect that, so we relax that restriction here. + // National-only numbers are not allowed. + // + // References: + // - https://www.itu.int/rec/T-REC-E.164-201011-I + // - https://en.wikipedia.org/wiki/E.164. + // - https://en.wikipedia.org/wiki/List_of_country_calling_codes + string e164_number = 1; + + // A short code. + // + // Reference(s): + // - https://en.wikipedia.org/wiki/Short_code + ShortCode short_code = 2; + } + + // The phone number's extension. The extension is not standardized in ITU + // recommendations, except for being defined as a series of numbers with a + // maximum length of 40 digits. Other than digits, some other dialing + // characters such as ',' (indicating a wait) or '#' may be stored here. + // + // Note that no regions currently use extensions with short codes, so this + // field is normally only set in conjunction with an E.164 number. It is held + // separately from the E.164 number to allow for short code extensions in the + // future. 
+ string extension = 3; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/postal_address.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/postal_address.proto new file mode 100644 index 000000000000..c57c7c31a2cc --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/postal_address.proto @@ -0,0 +1,134 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/postaladdress;postaladdress"; +option java_multiple_files = true; +option java_outer_classname = "PostalAddressProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a postal address, e.g. for postal delivery or payments addresses. +// Given a postal address, a postal service can deliver items to a premise, P.O. +// Box or similar. +// It is not intended to model geographical locations (roads, towns, +// mountains). +// +// In typical usage an address would be created via user input or from importing +// existing data, depending on the type of process. 
+// +// Advice on address input / editing: +// - Use an i18n-ready address widget such as +// https://github.com/google/libaddressinput) +// - Users should not be presented with UI elements for input or editing of +// fields outside countries where that field is used. +// +// For more guidance on how to use this schema, please see: +// https://support.google.com/business/answer/6397478 +message PostalAddress { + // The schema revision of the `PostalAddress`. This must be set to 0, which is + // the latest revision. + // + // All new revisions **must** be backward compatible with old revisions. + int32 revision = 1; + + // Required. CLDR region code of the country/region of the address. This + // is never inferred and it is up to the user to ensure the value is + // correct. See http://cldr.unicode.org/ and + // http://www.unicode.org/cldr/charts/30/supplemental/territory_information.html + // for details. Example: "CH" for Switzerland. + string region_code = 2; + + // Optional. BCP-47 language code of the contents of this address (if + // known). This is often the UI language of the input form or is expected + // to match one of the languages used in the address' country/region, or their + // transliterated equivalents. + // This can affect formatting in certain countries, but is not critical + // to the correctness of the data and will never affect any validation or + // other non-formatting related operations. + // + // If this value is not known, it should be omitted (rather than specifying a + // possibly incorrect default). + // + // Examples: "zh-Hant", "ja", "ja-Latn", "en". + string language_code = 3; + + // Optional. Postal code of the address. Not all countries use or require + // postal codes to be present, but where they are used, they may trigger + // additional validation with other parts of the address (e.g. state/zip + // validation in the U.S.A.). + string postal_code = 4; + + // Optional. Additional, country-specific, sorting code. 
This is not used + // in most regions. Where it is used, the value is either a string like + // "CEDEX", optionally followed by a number (e.g. "CEDEX 7"), or just a number + // alone, representing the "sector code" (Jamaica), "delivery area indicator" + // (Malawi) or "post office indicator" (e.g. Côte d'Ivoire). + string sorting_code = 5; + + // Optional. Highest administrative subdivision which is used for postal + // addresses of a country or region. + // For example, this can be a state, a province, an oblast, or a prefecture. + // Specifically, for Spain this is the province and not the autonomous + // community (e.g. "Barcelona" and not "Catalonia"). + // Many countries don't use an administrative area in postal addresses. E.g. + // in Switzerland this should be left unpopulated. + string administrative_area = 6; + + // Optional. Generally refers to the city/town portion of the address. + // Examples: US city, IT comune, UK post town. + // In regions of the world where localities are not well defined or do not fit + // into this structure well, leave locality empty and use address_lines. + string locality = 7; + + // Optional. Sublocality of the address. + // For example, this can be neighborhoods, boroughs, districts. + string sublocality = 8; + + // Unstructured address lines describing the lower levels of an address. + // + // Because values in address_lines do not have type information and may + // sometimes contain multiple values in a single field (e.g. + // "Austin, TX"), it is important that the line order is clear. The order of + // address lines should be "envelope order" for the country/region of the + // address. In places where this can vary (e.g. Japan), address_language is + // used to make it explicit (e.g. "ja" for large-to-small ordering and + // "ja-Latn" or "en" for small-to-large). This way, the most specific line of + // an address can be selected based on the language. 
+ // + // The minimum permitted structural representation of an address consists + // of a region_code with all remaining information placed in the + // address_lines. It would be possible to format such an address very + // approximately without geocoding, but no semantic reasoning could be + // made about any of the address components until it was at least + // partially resolved. + // + // Creating an address only containing a region_code and address_lines, and + // then geocoding is the recommended way to handle completely unstructured + // addresses (as opposed to guessing which parts of the address should be + // localities or administrative areas). + repeated string address_lines = 9; + + // Optional. The recipient at the address. + // This field may, under certain circumstances, contain multiline information. + // For example, it might contain "care of" information. + repeated string recipients = 10; + + // Optional. The name of the organization at the address. + string organization = 11; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/quaternion.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/quaternion.proto new file mode 100644 index 000000000000..dfb822deff16 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/quaternion.proto @@ -0,0 +1,94 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/quaternion;quaternion"; +option java_multiple_files = true; +option java_outer_classname = "QuaternionProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// A quaternion is defined as the quotient of two directed lines in a +// three-dimensional space or equivalently as the quotient of two Euclidean +// vectors (https://en.wikipedia.org/wiki/Quaternion). +// +// Quaternions are often used in calculations involving three-dimensional +// rotations (https://en.wikipedia.org/wiki/Quaternions_and_spatial_rotation), +// as they provide greater mathematical robustness by avoiding the gimbal lock +// problems that can be encountered when using Euler angles +// (https://en.wikipedia.org/wiki/Gimbal_lock). +// +// Quaternions are generally represented in this form: +// +// w + xi + yj + zk +// +// where x, y, z, and w are real numbers, and i, j, and k are three imaginary +// numbers. +// +// Our naming choice `(x, y, z, w)` comes from the desire to avoid confusion for +// those interested in the geometric properties of the quaternion in the 3D +// Cartesian space. Other texts often use alternative names or subscripts, such +// as `(a, b, c, d)`, `(1, i, j, k)`, or `(0, 1, 2, 3)`, which are perhaps +// better suited for mathematical interpretations. +// +// To avoid any confusion, as well as to maintain compatibility with a large +// number of software libraries, the quaternions represented using the protocol +// buffer below *must* follow the Hamilton convention, which defines `ij = k` +// (i.e. 
a right-handed algebra), and therefore: +// +// i^2 = j^2 = k^2 = ijk = −1 +// ij = −ji = k +// jk = −kj = i +// ki = −ik = j +// +// Please DO NOT use this to represent quaternions that follow the JPL +// convention, or any of the other quaternion flavors out there. +// +// Definitions: +// +// - Quaternion norm (or magnitude): `sqrt(x^2 + y^2 + z^2 + w^2)`. +// - Unit (or normalized) quaternion: a quaternion whose norm is 1. +// - Pure quaternion: a quaternion whose scalar component (`w`) is 0. +// - Rotation quaternion: a unit quaternion used to represent rotation. +// - Orientation quaternion: a unit quaternion used to represent orientation. +// +// A quaternion can be normalized by dividing it by its norm. The resulting +// quaternion maintains the same direction, but has a norm of 1, i.e. it moves +// on the unit sphere. This is generally necessary for rotation and orientation +// quaternions, to avoid rounding errors: +// https://en.wikipedia.org/wiki/Rotation_formalisms_in_three_dimensions +// +// Note that `(x, y, z, w)` and `(-x, -y, -z, -w)` represent the same rotation, +// but normalization would be even more useful, e.g. for comparison purposes, if +// it would produce a unique representation. It is thus recommended that `w` be +// kept positive, which can be achieved by changing all the signs when `w` is +// negative. +// +message Quaternion { + // The x component. + double x = 1; + + // The y component. + double y = 2; + + // The z component. + double z = 3; + + // The scalar component. 
+ double w = 4; +} diff --git a/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/timeofday.proto b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/timeofday.proto new file mode 100644 index 000000000000..5cb48aa936f3 --- /dev/null +++ b/java/flight/flight-core/target/protoc-dependencies/d0a0ddc27debaf88a865ae1b64c8093e/google/type/timeofday.proto @@ -0,0 +1,44 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/type/timeofday;timeofday"; +option java_multiple_files = true; +option java_outer_classname = "TimeOfDayProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a time of day. The date and time zone are either not significant +// or are specified elsewhere. An API may choose to allow leap seconds. Related +// types are [google.type.Date][google.type.Date] and +// `google.protobuf.Timestamp`. +message TimeOfDay { + // Hours of day in 24 hour format. Should be from 0 to 23. An API may choose + // to allow the value "24:00:00" for scenarios like business closing time. + int32 hours = 1; + + // Minutes of hour of day. Must be from 0 to 59. + int32 minutes = 2; + + // Seconds of minutes of the time. Must normally be from 0 to 59. 
An API may + // allow the value 60 if it allows leap-seconds. + int32 seconds = 3; + + // Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999. + int32 nanos = 4; +} diff --git a/java/flight/flight-core/target/protoc-plugins/protoc-3.23.1-osx-aarch_64.exe b/java/flight/flight-core/target/protoc-plugins/protoc-3.23.1-osx-aarch_64.exe new file mode 100755 index 000000000000..9c2f8c62a974 Binary files /dev/null and b/java/flight/flight-core/target/protoc-plugins/protoc-3.23.1-osx-aarch_64.exe differ diff --git a/java/flight/flight-core/target/protoc-plugins/protoc-gen-grpc-java-1.63.0-osx-aarch_64.exe b/java/flight/flight-core/target/protoc-plugins/protoc-gen-grpc-java-1.63.0-osx-aarch_64.exe new file mode 100755 index 000000000000..3dcb0014c6b5 Binary files /dev/null and b/java/flight/flight-core/target/protoc-plugins/protoc-gen-grpc-java-1.63.0-osx-aarch_64.exe differ diff --git a/java/flight/flight-core/target/test-classes/logback.xml b/java/flight/flight-core/target/test-classes/logback.xml new file mode 100644 index 000000000000..444b2ed6d839 --- /dev/null +++ b/java/flight/flight-core/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + true + 10000 + true + ${LILITH_HOSTNAME:-localhost} + + + + + + + + diff --git a/java/flight/flight-core/target/test-classes/perf.proto b/java/flight/flight-core/target/test-classes/perf.proto new file mode 100644 index 000000000000..99f35a9e65de --- /dev/null +++ b/java/flight/flight-core/target/test-classes/perf.proto @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +option java_package = "org.apache.arrow.flight.perf.impl"; + +message Perf { + bytes schema = 1; + int32 stream_count = 2; + int64 records_per_stream = 3; + int32 records_per_batch = 4; +} + +/* + * Payload of ticket + */ +message Token { + + // definition of entire flight. + Perf definition = 1; + + // inclusive start + int64 start = 2; + + // exclusive end + int64 end = 3; + +} + diff --git a/java/flight/flight-core/target/test-classes/test.proto b/java/flight/flight-core/target/test-classes/test.proto new file mode 100644 index 000000000000..6fa1890b2b71 --- /dev/null +++ b/java/flight/flight-core/target/test-classes/test.proto @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +syntax = "proto3"; + +option java_package = "org.apache.arrow.flight"; + +import "google/protobuf/empty.proto"; + +service TestService { + rpc Test(google.protobuf.Empty) returns (google.protobuf.Empty) {} +} diff --git a/java/flight/flight-integration-tests/target/classes/arrow-git.properties b/java/flight/flight-integration-tests/target/classes/arrow-git.properties new file mode 100644 index 000000000000..aa3cf5d7b4c4 --- /dev/null +++ b/java/flight/flight-integration-tests/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:26 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/flight/flight-integration-tests/target/test-classes/logback.xml b/java/flight/flight-integration-tests/target/test-classes/logback.xml new file mode 100644 index 000000000000..95fb0b37dc5d --- /dev/null +++ 
b/java/flight/flight-integration-tests/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + diff --git a/java/flight/flight-sql-jdbc-core/src/main/resources/properties/flight.properties b/java/flight/flight-sql-jdbc-core/src/main/resources/properties/flight.properties new file mode 100644 index 000000000000..85140d8d6083 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/src/main/resources/properties/flight.properties @@ -0,0 +1,70 @@ +#Properties +additionalparam=-Xdoclint\:none +arguments= +arrow.vector.classifier= +checker.framework.version=3.42.0 +checkstyle.failOnViolation=true +dep.avro.version=1.11.3 +dep.fbs.version=23.5.26 +dep.grpc-bom.version=1.63.0 +dep.guava-bom.version=33.0.0-jre +dep.hadoop.version=3.4.0 +dep.jackson-bom.version=2.17.0 +dep.junit.jupiter.version=5.10.2 +dep.junit.platform.version=1.9.0 +dep.netty-bom.version=4.1.108.Final +dep.protobuf-bom.version=3.23.1 +dep.slf4j.version=2.0.11 +distMgmtSnapshotsName=Apache Development Snapshot Repository +distMgmtSnapshotsUrl=https\://repository.apache.org/content/repositories/snapshots +doclint=none +error_prone_core.version=2.24.0 +errorprone.javac.version=9+181-r4173-1 +forkCount=2 +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:27 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: 
[Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Raúl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 +gpg.useagent=true +m2e.disableTestClasspathFlag=false +maven-compiler-plugin.version=3.12.1 +maven.compiler.source=1.6 +maven.compiler.target=1.6 +maven.multiModuleProjectDirectory=/Users/christopher.pride/scratch/arrow +mockito.core.version=5.11.0 +mockito.inline.version=5.2.0 +org.apache.arrow.flight.jdbc-driver.name=Arrow Flight SQL JDBC Driver Core +org.apache.arrow.flight.jdbc-driver.version=16.1.0 +org.apache.arrow.flight.name=org.apache.arrow\:arrow-flight +org.apache.arrow.flight.version=16.1.0 +organization.logo=https\://www.apache.org/images/asf_logo_wide.gif +os.detected.arch=aarch_64 +os.detected.bitness=64 +os.detected.classifier=osx-aarch_64 +os.detected.name=osx +project.build.sourceEncoding=UTF-8 +project.reporting.outputEncoding=UTF-8 +sourceReleaseAssemblyDescriptor=source-release +surefire.version=2.19.1 +target.gen.source.path=/Users/christopher.pride/scratch/arrow/java/flight/flight-sql-jdbc-core/target/generated-sources diff --git a/java/flight/flight-sql-jdbc-core/target/classes/META-INF/services/java.sql.Driver b/java/flight/flight-sql-jdbc-core/target/classes/META-INF/services/java.sql.Driver new file mode 100644 index 000000000000..83cfb23427f7 --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/target/classes/META-INF/services/java.sql.Driver @@ -0,0 +1,15 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +org.apache.arrow.driver.jdbc.ArrowFlightJdbcDriver \ No newline at end of file diff --git a/java/flight/flight-sql-jdbc-core/target/classes/arrow-git.properties b/java/flight/flight-sql-jdbc-core/target/classes/arrow-git.properties new file mode 100644 index 000000000000..63b01a20b34b --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:27 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE 
+git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/flight/flight-sql-jdbc-core/target/test-classes/keys/keyStore.jks b/java/flight/flight-sql-jdbc-core/target/test-classes/keys/keyStore.jks new file mode 100644 index 000000000000..32a9bedea500 Binary files /dev/null and b/java/flight/flight-sql-jdbc-core/target/test-classes/keys/keyStore.jks differ diff --git a/java/flight/flight-sql-jdbc-core/target/test-classes/keys/noCertificate.jks b/java/flight/flight-sql-jdbc-core/target/test-classes/keys/noCertificate.jks new file mode 100644 index 000000000000..071a1ebf97b3 Binary files /dev/null and b/java/flight/flight-sql-jdbc-core/target/test-classes/keys/noCertificate.jks differ diff --git a/java/flight/flight-sql-jdbc-core/target/test-classes/logback.xml b/java/flight/flight-sql-jdbc-core/target/test-classes/logback.xml new file mode 100644 index 000000000000..ce66f8d82acd --- /dev/null +++ b/java/flight/flight-sql-jdbc-core/target/test-classes/logback.xml @@ -0,0 +1,27 @@ + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + diff --git a/java/flight/flight-sql-jdbc-driver/target/classes/arrow-git.properties b/java/flight/flight-sql-jdbc-driver/target/classes/arrow-git.properties new file mode 100644 index 000000000000..6b3794292e47 --- /dev/null +++ b/java/flight/flight-sql-jdbc-driver/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:28 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT 
+git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/flight/flight-sql/target/classes/arrow-git.properties b/java/flight/flight-sql/target/classes/arrow-git.properties new file mode 100644 index 000000000000..aa3cf5d7b4c4 --- /dev/null +++ b/java/flight/flight-sql/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:26 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE 
+git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/flight/flight-sql/target/test-classes/logback.xml b/java/flight/flight-sql/target/test-classes/logback.xml new file mode 100644 index 000000000000..4c54d18a210f --- /dev/null +++ b/java/flight/flight-sql/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + diff --git a/java/format/target/classes/arrow-git.properties b/java/format/target/classes/arrow-git.properties new file mode 100644 index 000000000000..adb4932d13a2 --- /dev/null +++ b/java/format/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:12 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Binary$Vector.class 
b/java/format/target/classes/org/apache/arrow/flatbuf/Binary$Vector.class new file mode 100644 index 000000000000..7d8195052030 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Binary$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Binary.class b/java/format/target/classes/org/apache/arrow/flatbuf/Binary.class new file mode 100644 index 000000000000..8affcd12b65b Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Binary.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/BinaryView$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/BinaryView$Vector.class new file mode 100644 index 000000000000..d684fe6829dc Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/BinaryView$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/BinaryView.class b/java/format/target/classes/org/apache/arrow/flatbuf/BinaryView.class new file mode 100644 index 000000000000..e91c52328cb3 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/BinaryView.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Block$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Block$Vector.class new file mode 100644 index 000000000000..2652bd48def6 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Block$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Block.class b/java/format/target/classes/org/apache/arrow/flatbuf/Block.class new file mode 100644 index 000000000000..c5d708582687 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Block.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/BodyCompression$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/BodyCompression$Vector.class new file mode 100644 index 
000000000000..8dcbde93cf8d Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/BodyCompression$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/BodyCompression.class b/java/format/target/classes/org/apache/arrow/flatbuf/BodyCompression.class new file mode 100644 index 000000000000..7a8302b0b06a Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/BodyCompression.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/BodyCompressionMethod.class b/java/format/target/classes/org/apache/arrow/flatbuf/BodyCompressionMethod.class new file mode 100644 index 000000000000..50b696c649e9 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/BodyCompressionMethod.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Bool$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Bool$Vector.class new file mode 100644 index 000000000000..f49b6ed0887c Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Bool$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Bool.class b/java/format/target/classes/org/apache/arrow/flatbuf/Bool.class new file mode 100644 index 000000000000..ec8b97690ca7 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Bool.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Buffer$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Buffer$Vector.class new file mode 100644 index 000000000000..f4b18694574f Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Buffer$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Buffer.class b/java/format/target/classes/org/apache/arrow/flatbuf/Buffer.class new file mode 100644 index 000000000000..7fc8ff09463d Binary files /dev/null and 
b/java/format/target/classes/org/apache/arrow/flatbuf/Buffer.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/CompressionType.class b/java/format/target/classes/org/apache/arrow/flatbuf/CompressionType.class new file mode 100644 index 000000000000..59a7d7b4c79b Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/CompressionType.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Date$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Date$Vector.class new file mode 100644 index 000000000000..59250a88babd Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Date$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Date.class b/java/format/target/classes/org/apache/arrow/flatbuf/Date.class new file mode 100644 index 000000000000..16105b576023 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Date.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/DateUnit.class b/java/format/target/classes/org/apache/arrow/flatbuf/DateUnit.class new file mode 100644 index 000000000000..89b03ad7fd77 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/DateUnit.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Decimal$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Decimal$Vector.class new file mode 100644 index 000000000000..d660ce6010df Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Decimal$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Decimal.class b/java/format/target/classes/org/apache/arrow/flatbuf/Decimal.class new file mode 100644 index 000000000000..172ca15bb366 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Decimal.class differ diff --git 
a/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryBatch$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryBatch$Vector.class new file mode 100644 index 000000000000..87728abf2907 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryBatch$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryBatch.class b/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryBatch.class new file mode 100644 index 000000000000..2251a48a5288 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryBatch.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryEncoding$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryEncoding$Vector.class new file mode 100644 index 000000000000..7e7521a31872 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryEncoding$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryEncoding.class b/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryEncoding.class new file mode 100644 index 000000000000..bd3f1f03db46 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryEncoding.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryKind.class b/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryKind.class new file mode 100644 index 000000000000..a128b942fb3c Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/DictionaryKind.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Duration$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Duration$Vector.class new file mode 100644 index 000000000000..a244846a9ca0 Binary files /dev/null and 
b/java/format/target/classes/org/apache/arrow/flatbuf/Duration$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Duration.class b/java/format/target/classes/org/apache/arrow/flatbuf/Duration.class new file mode 100644 index 000000000000..37a2e4a35b48 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Duration.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Endianness.class b/java/format/target/classes/org/apache/arrow/flatbuf/Endianness.class new file mode 100644 index 000000000000..bfb9e4dfbb39 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Endianness.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Feature.class b/java/format/target/classes/org/apache/arrow/flatbuf/Feature.class new file mode 100644 index 000000000000..9044095e4380 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Feature.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Field$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Field$Vector.class new file mode 100644 index 000000000000..243cbe36155f Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Field$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Field.class b/java/format/target/classes/org/apache/arrow/flatbuf/Field.class new file mode 100644 index 000000000000..799870feda8e Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Field.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/FieldNode$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/FieldNode$Vector.class new file mode 100644 index 000000000000..792dc40cae87 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/FieldNode$Vector.class differ diff --git 
a/java/format/target/classes/org/apache/arrow/flatbuf/FieldNode.class b/java/format/target/classes/org/apache/arrow/flatbuf/FieldNode.class new file mode 100644 index 000000000000..19451f275b97 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/FieldNode.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeBinary$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeBinary$Vector.class new file mode 100644 index 000000000000..1d3292402255 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeBinary$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeBinary.class b/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeBinary.class new file mode 100644 index 000000000000..2d4f7b2027f3 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeBinary.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeList$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeList$Vector.class new file mode 100644 index 000000000000..f968fb1bce4b Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeList$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeList.class b/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeList.class new file mode 100644 index 000000000000..3da65029848b Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/FixedSizeList.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/FloatingPoint$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/FloatingPoint$Vector.class new file mode 100644 index 000000000000..3124653ff093 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/FloatingPoint$Vector.class differ diff --git 
a/java/format/target/classes/org/apache/arrow/flatbuf/FloatingPoint.class b/java/format/target/classes/org/apache/arrow/flatbuf/FloatingPoint.class new file mode 100644 index 000000000000..b075e3ec77a2 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/FloatingPoint.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Footer$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Footer$Vector.class new file mode 100644 index 000000000000..727be89abf37 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Footer$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Footer.class b/java/format/target/classes/org/apache/arrow/flatbuf/Footer.class new file mode 100644 index 000000000000..b387ef16e004 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Footer.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Int$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Int$Vector.class new file mode 100644 index 000000000000..5b771aa1a829 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Int$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Int.class b/java/format/target/classes/org/apache/arrow/flatbuf/Int.class new file mode 100644 index 000000000000..38183cbf0130 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Int.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Interval$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Interval$Vector.class new file mode 100644 index 000000000000..fa09ad892256 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Interval$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Interval.class 
b/java/format/target/classes/org/apache/arrow/flatbuf/Interval.class new file mode 100644 index 000000000000..ea746ed8e6bc Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Interval.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/IntervalUnit.class b/java/format/target/classes/org/apache/arrow/flatbuf/IntervalUnit.class new file mode 100644 index 000000000000..fb2dd1576a20 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/IntervalUnit.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/KeyValue$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/KeyValue$Vector.class new file mode 100644 index 000000000000..3704c00faf7e Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/KeyValue$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/KeyValue.class b/java/format/target/classes/org/apache/arrow/flatbuf/KeyValue.class new file mode 100644 index 000000000000..78ff644e4d20 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/KeyValue.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/LargeBinary$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/LargeBinary$Vector.class new file mode 100644 index 000000000000..7cc1b1892e08 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/LargeBinary$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/LargeBinary.class b/java/format/target/classes/org/apache/arrow/flatbuf/LargeBinary.class new file mode 100644 index 000000000000..c0220ccd68a2 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/LargeBinary.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/LargeList$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/LargeList$Vector.class new 
file mode 100644 index 000000000000..d30c8113c1b1 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/LargeList$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/LargeList.class b/java/format/target/classes/org/apache/arrow/flatbuf/LargeList.class new file mode 100644 index 000000000000..207dbb825d0f Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/LargeList.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/LargeListView$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/LargeListView$Vector.class new file mode 100644 index 000000000000..7ecd7e18bac2 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/LargeListView$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/LargeListView.class b/java/format/target/classes/org/apache/arrow/flatbuf/LargeListView.class new file mode 100644 index 000000000000..bf7fa8dc20af Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/LargeListView.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/LargeUtf8$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/LargeUtf8$Vector.class new file mode 100644 index 000000000000..5e419b5138a0 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/LargeUtf8$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/LargeUtf8.class b/java/format/target/classes/org/apache/arrow/flatbuf/LargeUtf8.class new file mode 100644 index 000000000000..5b8a1b2094c4 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/LargeUtf8.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/List$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/List$Vector.class new file mode 100644 index 000000000000..b2e86f7650cf Binary files 
/dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/List$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/List.class b/java/format/target/classes/org/apache/arrow/flatbuf/List.class new file mode 100644 index 000000000000..31bc9b3928a0 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/List.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/ListView$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/ListView$Vector.class new file mode 100644 index 000000000000..9aff23c32ded Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/ListView$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/ListView.class b/java/format/target/classes/org/apache/arrow/flatbuf/ListView.class new file mode 100644 index 000000000000..c3ec2df840ef Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/ListView.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Map$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Map$Vector.class new file mode 100644 index 000000000000..d3d7f4204f23 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Map$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Map.class b/java/format/target/classes/org/apache/arrow/flatbuf/Map.class new file mode 100644 index 000000000000..7373c8933b3e Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Map.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Message$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Message$Vector.class new file mode 100644 index 000000000000..a9466bbc94ed Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Message$Vector.class differ diff --git 
a/java/format/target/classes/org/apache/arrow/flatbuf/Message.class b/java/format/target/classes/org/apache/arrow/flatbuf/Message.class new file mode 100644 index 000000000000..7a62be43040c Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Message.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/MessageHeader.class b/java/format/target/classes/org/apache/arrow/flatbuf/MessageHeader.class new file mode 100644 index 000000000000..216f53e75700 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/MessageHeader.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/MetadataVersion.class b/java/format/target/classes/org/apache/arrow/flatbuf/MetadataVersion.class new file mode 100644 index 000000000000..f19e7f7fb97e Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/MetadataVersion.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Null$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Null$Vector.class new file mode 100644 index 000000000000..677d759c0eb6 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Null$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Null.class b/java/format/target/classes/org/apache/arrow/flatbuf/Null.class new file mode 100644 index 000000000000..67f64c23622b Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Null.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Precision.class b/java/format/target/classes/org/apache/arrow/flatbuf/Precision.class new file mode 100644 index 000000000000..08bff99b780d Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Precision.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/RecordBatch$Vector.class 
b/java/format/target/classes/org/apache/arrow/flatbuf/RecordBatch$Vector.class new file mode 100644 index 000000000000..617ae653e811 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/RecordBatch$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/RecordBatch.class b/java/format/target/classes/org/apache/arrow/flatbuf/RecordBatch.class new file mode 100644 index 000000000000..42ac09dd34d5 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/RecordBatch.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/RunEndEncoded$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/RunEndEncoded$Vector.class new file mode 100644 index 000000000000..4cc7a40a346b Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/RunEndEncoded$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/RunEndEncoded.class b/java/format/target/classes/org/apache/arrow/flatbuf/RunEndEncoded.class new file mode 100644 index 000000000000..1895670f9fdd Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/RunEndEncoded.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Schema$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Schema$Vector.class new file mode 100644 index 000000000000..516bafc3d290 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Schema$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Schema.class b/java/format/target/classes/org/apache/arrow/flatbuf/Schema.class new file mode 100644 index 000000000000..116af15ea3a2 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Schema.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/SparseMatrixCompressedAxis.class 
b/java/format/target/classes/org/apache/arrow/flatbuf/SparseMatrixCompressedAxis.class new file mode 100644 index 000000000000..9daffb9b18a7 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/SparseMatrixCompressedAxis.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/SparseMatrixIndexCSX$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/SparseMatrixIndexCSX$Vector.class new file mode 100644 index 000000000000..2d55049413ec Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/SparseMatrixIndexCSX$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/SparseMatrixIndexCSX.class b/java/format/target/classes/org/apache/arrow/flatbuf/SparseMatrixIndexCSX.class new file mode 100644 index 000000000000..23e9efe6a260 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/SparseMatrixIndexCSX.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensor$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensor$Vector.class new file mode 100644 index 000000000000..c857a1ae2102 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensor$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensor.class b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensor.class new file mode 100644 index 000000000000..7bf4457f7891 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensor.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndex.class b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndex.class new file mode 100644 index 000000000000..6e733c9f2b87 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndex.class differ diff --git 
a/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCOO$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCOO$Vector.class new file mode 100644 index 000000000000..2e98b0e97d0a Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCOO$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCOO.class b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCOO.class new file mode 100644 index 000000000000..342f3e1afb21 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCOO.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCSF$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCSF$Vector.class new file mode 100644 index 000000000000..c80e76ff3520 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCSF$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCSF.class b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCSF.class new file mode 100644 index 000000000000..e8005c570cf8 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/SparseTensorIndexCSF.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Struct_$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Struct_$Vector.class new file mode 100644 index 000000000000..601b6a2b0932 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Struct_$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Struct_.class b/java/format/target/classes/org/apache/arrow/flatbuf/Struct_.class new file mode 100644 index 000000000000..a3f1069ca79f Binary files /dev/null and 
b/java/format/target/classes/org/apache/arrow/flatbuf/Struct_.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Tensor$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Tensor$Vector.class new file mode 100644 index 000000000000..2a9b1d3a8009 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Tensor$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Tensor.class b/java/format/target/classes/org/apache/arrow/flatbuf/Tensor.class new file mode 100644 index 000000000000..dd039d58da09 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Tensor.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/TensorDim$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/TensorDim$Vector.class new file mode 100644 index 000000000000..e1d1e6e0561a Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/TensorDim$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/TensorDim.class b/java/format/target/classes/org/apache/arrow/flatbuf/TensorDim.class new file mode 100644 index 000000000000..e5f87d6b2897 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/TensorDim.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Time$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Time$Vector.class new file mode 100644 index 000000000000..f88edf57d551 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Time$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Time.class b/java/format/target/classes/org/apache/arrow/flatbuf/Time.class new file mode 100644 index 000000000000..4b76d7b848b4 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Time.class differ diff --git 
a/java/format/target/classes/org/apache/arrow/flatbuf/TimeUnit.class b/java/format/target/classes/org/apache/arrow/flatbuf/TimeUnit.class new file mode 100644 index 000000000000..cd094cde7e33 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/TimeUnit.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Timestamp$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Timestamp$Vector.class new file mode 100644 index 000000000000..9d724902b3e4 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Timestamp$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Timestamp.class b/java/format/target/classes/org/apache/arrow/flatbuf/Timestamp.class new file mode 100644 index 000000000000..277211573cdc Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Timestamp.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Type.class b/java/format/target/classes/org/apache/arrow/flatbuf/Type.class new file mode 100644 index 000000000000..e097c4fd46b9 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Type.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Union$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Union$Vector.class new file mode 100644 index 000000000000..761e4233a68a Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Union$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Union.class b/java/format/target/classes/org/apache/arrow/flatbuf/Union.class new file mode 100644 index 000000000000..cd5a5f652cf7 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Union.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/UnionMode.class b/java/format/target/classes/org/apache/arrow/flatbuf/UnionMode.class new 
file mode 100644 index 000000000000..b8c2b7f656d4 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/UnionMode.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Utf8$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Utf8$Vector.class new file mode 100644 index 000000000000..ccf1777824c3 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Utf8$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Utf8.class b/java/format/target/classes/org/apache/arrow/flatbuf/Utf8.class new file mode 100644 index 000000000000..4f0d87c6c611 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Utf8.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Utf8View$Vector.class b/java/format/target/classes/org/apache/arrow/flatbuf/Utf8View$Vector.class new file mode 100644 index 000000000000..cb482f1b6317 Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Utf8View$Vector.class differ diff --git a/java/format/target/classes/org/apache/arrow/flatbuf/Utf8View.class b/java/format/target/classes/org/apache/arrow/flatbuf/Utf8View.class new file mode 100644 index 000000000000..5e90a98b239b Binary files /dev/null and b/java/format/target/classes/org/apache/arrow/flatbuf/Utf8View.class differ diff --git a/java/gandiva/target/classes/arrow-git.properties b/java/gandiva/target/classes/arrow-git.properties new file mode 100644 index 000000000000..8e92e95d8f26 --- /dev/null +++ b/java/gandiva/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:13 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 
+git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/gandiva/target/classes/gandiva/types.proto b/java/gandiva/target/classes/gandiva/types.proto new file mode 100644 index 000000000000..4ce342681d61 --- /dev/null +++ b/java/gandiva/target/classes/gandiva/types.proto @@ -0,0 +1,255 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +syntax = "proto3"; +package gandiva.types; + +option java_package = "org.apache.arrow.gandiva.ipc"; +option java_outer_classname = "GandivaTypes"; +option optimize_for = SPEED; + +enum GandivaType { + NONE = 0; // arrow::Type::NA + BOOL = 1; // arrow::Type::BOOL + UINT8 = 2; // arrow::Type::UINT8 + INT8 = 3; // arrow::Type::INT8 + UINT16 = 4; // represents arrow::Type fields in src/arrow/type.h + INT16 = 5; + UINT32 = 6; + INT32 = 7; + UINT64 = 8; + INT64 = 9; + HALF_FLOAT = 10; + FLOAT = 11; + DOUBLE = 12; + UTF8 = 13; + BINARY = 14; + FIXED_SIZE_BINARY = 15; + DATE32 = 16; + DATE64 = 17; + TIMESTAMP = 18; + TIME32 = 19; + TIME64 = 20; + INTERVAL = 21; + DECIMAL = 22; + LIST = 23; + STRUCT = 24; + UNION = 25; + DICTIONARY = 26; + MAP = 27; +} + +enum DateUnit { + DAY = 0; + MILLI = 1; +} + +enum TimeUnit { + SEC = 0; + MILLISEC = 1; + MICROSEC = 2; + NANOSEC = 3; +} + +enum IntervalType { + YEAR_MONTH = 0; + DAY_TIME = 1; +} + +enum SelectionVectorType { + SV_NONE = 0; + SV_INT16 = 1; + SV_INT32 = 2; +} + +message ExtGandivaType { + optional GandivaType type = 1; + optional uint32 width = 2; // used by FIXED_SIZE_BINARY + optional int32 precision = 3; // used by DECIMAL + optional int32 scale = 4; // used by DECIMAL + optional DateUnit dateUnit = 5; // used by DATE32/DATE64 + optional TimeUnit timeUnit = 6; // used by TIME32/TIME64 + optional string timeZone = 7; // used by TIMESTAMP + optional IntervalType intervalType = 8; // used by INTERVAL +} + +message Field { + // name of the field + optional string name = 1; + optional ExtGandivaType type = 2; + optional bool nullable = 3; + // for complex data types like structs, unions + repeated Field children = 4; +} + +message FieldNode { + optional Field field = 1; +} + +message FunctionNode { + optional string functionName = 1; + repeated TreeNode inArgs = 2; + optional ExtGandivaType returnType = 3; +} + +message IfNode { + optional TreeNode cond = 1; + optional TreeNode thenNode = 2; + optional TreeNode 
elseNode = 3; + optional ExtGandivaType returnType = 4; +} + +message AndNode { + repeated TreeNode args = 1; +} + +message OrNode { + repeated TreeNode args = 1; +} + +message NullNode { + optional ExtGandivaType type = 1; +} + +message IntNode { + optional int32 value = 1; +} + +message FloatNode { + optional float value = 1; +} + +message DoubleNode { + optional double value = 1; +} + +message BooleanNode { + optional bool value = 1; +} + +message LongNode { + optional int64 value = 1; +} + +message StringNode { + optional bytes value = 1; +} + +message BinaryNode { + optional bytes value = 1; +} + +message DecimalNode { + optional string value = 1; + optional int32 precision = 2; + optional int32 scale = 3; +} + + +message TreeNode { + optional FieldNode fieldNode = 1; + optional FunctionNode fnNode = 2; + + // control expressions + optional IfNode ifNode = 6; + optional AndNode andNode = 7; + optional OrNode orNode = 8; + + // literals + optional NullNode nullNode = 11; + optional IntNode intNode = 12; + optional FloatNode floatNode = 13; + optional LongNode longNode = 14; + optional BooleanNode booleanNode = 15; + optional DoubleNode doubleNode = 16; + optional StringNode stringNode = 17; + optional BinaryNode binaryNode = 18; + optional DecimalNode decimalNode = 19; + + // in expr + optional InNode inNode = 21; +} + +message ExpressionRoot { + optional TreeNode root = 1; + optional Field resultType = 2; +} + +message ExpressionList { + repeated ExpressionRoot exprs = 2; +} + +message Condition { + optional TreeNode root = 1; +} + +message Schema { + repeated Field columns = 1; +} + +message GandivaDataTypes { + repeated ExtGandivaType dataType = 1; +} + +message GandivaFunctions { + repeated FunctionSignature function = 1; +} + +message FunctionSignature { + optional string name = 1; + optional ExtGandivaType returnType = 2; + repeated ExtGandivaType paramTypes = 3; +} + +message InNode { + optional TreeNode node = 1; + optional IntConstants intValues = 2; + 
optional LongConstants longValues = 3; + optional StringConstants stringValues = 4; + optional BinaryConstants binaryValues = 5; + optional DecimalConstants decimalValues = 6; + optional FloatConstants floatValues = 7; + optional DoubleConstants doubleValues = 8; +} + +message IntConstants { + repeated IntNode intValues = 1; +} + +message LongConstants { + repeated LongNode longValues = 1; +} + +message DecimalConstants { + repeated DecimalNode decimalValues = 1; +} + +message FloatConstants { + repeated FloatNode floatValues = 1; +} + +message DoubleConstants { + repeated DoubleNode doubleValues = 1; +} + +message StringConstants { + repeated StringNode stringValues = 1; +} + +message BinaryConstants { + repeated BinaryNode binaryValues = 1; +} diff --git a/java/gandiva/target/generated-sources/protobuf/java/org/apache/arrow/gandiva/ipc/GandivaTypes.java b/java/gandiva/target/generated-sources/protobuf/java/org/apache/arrow/gandiva/ipc/GandivaTypes.java new file mode 100644 index 000000000000..ac77d5f9de17 --- /dev/null +++ b/java/gandiva/target/generated-sources/protobuf/java/org/apache/arrow/gandiva/ipc/GandivaTypes.java @@ -0,0 +1,29657 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: gandiva/types.proto + +package org.apache.arrow.gandiva.ipc; + +public final class GandivaTypes { + private GandivaTypes() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + /** + * Protobuf enum {@code gandiva.types.GandivaType} + */ + public enum GandivaType + implements com.google.protobuf.ProtocolMessageEnum { + /** + *

+     * arrow::Type::NA
+     * 
+ * + * NONE = 0; + */ + NONE(0), + /** + *
+     * arrow::Type::BOOL
+     * 
+ * + * BOOL = 1; + */ + BOOL(1), + /** + *
+     * arrow::Type::UINT8
+     * 
+ * + * UINT8 = 2; + */ + UINT8(2), + /** + *
+     * arrow::Type::INT8
+     * 
+ * + * INT8 = 3; + */ + INT8(3), + /** + *
+     * represents arrow::Type fields in src/arrow/type.h
+     * 
+ * + * UINT16 = 4; + */ + UINT16(4), + /** + * INT16 = 5; + */ + INT16(5), + /** + * UINT32 = 6; + */ + UINT32(6), + /** + * INT32 = 7; + */ + INT32(7), + /** + * UINT64 = 8; + */ + UINT64(8), + /** + * INT64 = 9; + */ + INT64(9), + /** + * HALF_FLOAT = 10; + */ + HALF_FLOAT(10), + /** + * FLOAT = 11; + */ + FLOAT(11), + /** + * DOUBLE = 12; + */ + DOUBLE(12), + /** + * UTF8 = 13; + */ + UTF8(13), + /** + * BINARY = 14; + */ + BINARY(14), + /** + * FIXED_SIZE_BINARY = 15; + */ + FIXED_SIZE_BINARY(15), + /** + * DATE32 = 16; + */ + DATE32(16), + /** + * DATE64 = 17; + */ + DATE64(17), + /** + * TIMESTAMP = 18; + */ + TIMESTAMP(18), + /** + * TIME32 = 19; + */ + TIME32(19), + /** + * TIME64 = 20; + */ + TIME64(20), + /** + * INTERVAL = 21; + */ + INTERVAL(21), + /** + * DECIMAL = 22; + */ + DECIMAL(22), + /** + * LIST = 23; + */ + LIST(23), + /** + * STRUCT = 24; + */ + STRUCT(24), + /** + * UNION = 25; + */ + UNION(25), + /** + * DICTIONARY = 26; + */ + DICTIONARY(26), + /** + * MAP = 27; + */ + MAP(27), + UNRECOGNIZED(-1), + ; + + /** + *
+     * arrow::Type::NA
+     * 
+ * + * NONE = 0; + */ + public static final int NONE_VALUE = 0; + /** + *
+     * arrow::Type::BOOL
+     * 
+ * + * BOOL = 1; + */ + public static final int BOOL_VALUE = 1; + /** + *
+     * arrow::Type::UINT8
+     * 
+ * + * UINT8 = 2; + */ + public static final int UINT8_VALUE = 2; + /** + *
+     * arrow::Type::INT8
+     * 
+ * + * INT8 = 3; + */ + public static final int INT8_VALUE = 3; + /** + *
+     * represents arrow::Type fields in src/arrow/type.h
+     * 
+ * + * UINT16 = 4; + */ + public static final int UINT16_VALUE = 4; + /** + * INT16 = 5; + */ + public static final int INT16_VALUE = 5; + /** + * UINT32 = 6; + */ + public static final int UINT32_VALUE = 6; + /** + * INT32 = 7; + */ + public static final int INT32_VALUE = 7; + /** + * UINT64 = 8; + */ + public static final int UINT64_VALUE = 8; + /** + * INT64 = 9; + */ + public static final int INT64_VALUE = 9; + /** + * HALF_FLOAT = 10; + */ + public static final int HALF_FLOAT_VALUE = 10; + /** + * FLOAT = 11; + */ + public static final int FLOAT_VALUE = 11; + /** + * DOUBLE = 12; + */ + public static final int DOUBLE_VALUE = 12; + /** + * UTF8 = 13; + */ + public static final int UTF8_VALUE = 13; + /** + * BINARY = 14; + */ + public static final int BINARY_VALUE = 14; + /** + * FIXED_SIZE_BINARY = 15; + */ + public static final int FIXED_SIZE_BINARY_VALUE = 15; + /** + * DATE32 = 16; + */ + public static final int DATE32_VALUE = 16; + /** + * DATE64 = 17; + */ + public static final int DATE64_VALUE = 17; + /** + * TIMESTAMP = 18; + */ + public static final int TIMESTAMP_VALUE = 18; + /** + * TIME32 = 19; + */ + public static final int TIME32_VALUE = 19; + /** + * TIME64 = 20; + */ + public static final int TIME64_VALUE = 20; + /** + * INTERVAL = 21; + */ + public static final int INTERVAL_VALUE = 21; + /** + * DECIMAL = 22; + */ + public static final int DECIMAL_VALUE = 22; + /** + * LIST = 23; + */ + public static final int LIST_VALUE = 23; + /** + * STRUCT = 24; + */ + public static final int STRUCT_VALUE = 24; + /** + * UNION = 25; + */ + public static final int UNION_VALUE = 25; + /** + * DICTIONARY = 26; + */ + public static final int DICTIONARY_VALUE = 26; + /** + * MAP = 27; + */ + public static final int MAP_VALUE = 27; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire 
value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static GandivaType valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static GandivaType forNumber(int value) { + switch (value) { + case 0: return NONE; + case 1: return BOOL; + case 2: return UINT8; + case 3: return INT8; + case 4: return UINT16; + case 5: return INT16; + case 6: return UINT32; + case 7: return INT32; + case 8: return UINT64; + case 9: return INT64; + case 10: return HALF_FLOAT; + case 11: return FLOAT; + case 12: return DOUBLE; + case 13: return UTF8; + case 14: return BINARY; + case 15: return FIXED_SIZE_BINARY; + case 16: return DATE32; + case 17: return DATE64; + case 18: return TIMESTAMP; + case 19: return TIME32; + case 20: return TIME64; + case 21: return INTERVAL; + case 22: return DECIMAL; + case 23: return LIST; + case 24: return STRUCT; + case 25: return UNION; + case 26: return DICTIONARY; + case 27: return MAP; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + GandivaType> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public GandivaType findValueByNumber(int number) { + return GandivaType.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + 
return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.getDescriptor().getEnumTypes().get(0); + } + + private static final GandivaType[] VALUES = values(); + + public static GandivaType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private GandivaType(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:gandiva.types.GandivaType) + } + + /** + * Protobuf enum {@code gandiva.types.DateUnit} + */ + public enum DateUnit + implements com.google.protobuf.ProtocolMessageEnum { + /** + * DAY = 0; + */ + DAY(0), + /** + * MILLI = 1; + */ + MILLI(1), + UNRECOGNIZED(-1), + ; + + /** + * DAY = 0; + */ + public static final int DAY_VALUE = 0; + /** + * MILLI = 1; + */ + public static final int MILLI_VALUE = 1; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static DateUnit valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static DateUnit forNumber(int value) { + switch (value) { + case 0: return DAY; + case 1: return MILLI; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + DateUnit> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public DateUnit findValueByNumber(int number) { + return DateUnit.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.getDescriptor().getEnumTypes().get(1); + } + + private static final DateUnit[] VALUES = values(); + + public static DateUnit valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private DateUnit(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:gandiva.types.DateUnit) + } + + /** + * Protobuf enum {@code gandiva.types.TimeUnit} + */ + public enum TimeUnit + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SEC = 0; + */ + SEC(0), + /** + * MILLISEC = 1; + */ + MILLISEC(1), + /** + * MICROSEC = 2; + */ + MICROSEC(2), + /** + * NANOSEC = 3; + */ + NANOSEC(3), + UNRECOGNIZED(-1), + ; + + /** + * SEC 
= 0; + */ + public static final int SEC_VALUE = 0; + /** + * MILLISEC = 1; + */ + public static final int MILLISEC_VALUE = 1; + /** + * MICROSEC = 2; + */ + public static final int MICROSEC_VALUE = 2; + /** + * NANOSEC = 3; + */ + public static final int NANOSEC_VALUE = 3; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static TimeUnit valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static TimeUnit forNumber(int value) { + switch (value) { + case 0: return SEC; + case 1: return MILLISEC; + case 2: return MICROSEC; + case 3: return NANOSEC; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + TimeUnit> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public TimeUnit findValueByNumber(int number) { + return TimeUnit.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return 
org.apache.arrow.gandiva.ipc.GandivaTypes.getDescriptor().getEnumTypes().get(2); + } + + private static final TimeUnit[] VALUES = values(); + + public static TimeUnit valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private TimeUnit(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:gandiva.types.TimeUnit) + } + + /** + * Protobuf enum {@code gandiva.types.IntervalType} + */ + public enum IntervalType + implements com.google.protobuf.ProtocolMessageEnum { + /** + * YEAR_MONTH = 0; + */ + YEAR_MONTH(0), + /** + * DAY_TIME = 1; + */ + DAY_TIME(1), + UNRECOGNIZED(-1), + ; + + /** + * YEAR_MONTH = 0; + */ + public static final int YEAR_MONTH_VALUE = 0; + /** + * DAY_TIME = 1; + */ + public static final int DAY_TIME_VALUE = 1; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static IntervalType valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. 
+ */ + public static IntervalType forNumber(int value) { + switch (value) { + case 0: return YEAR_MONTH; + case 1: return DAY_TIME; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + IntervalType> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public IntervalType findValueByNumber(int number) { + return IntervalType.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.getDescriptor().getEnumTypes().get(3); + } + + private static final IntervalType[] VALUES = values(); + + public static IntervalType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private IntervalType(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:gandiva.types.IntervalType) + } + + /** + * Protobuf enum {@code gandiva.types.SelectionVectorType} + */ + public enum SelectionVectorType + implements com.google.protobuf.ProtocolMessageEnum { + /** + * SV_NONE = 0; + */ + SV_NONE(0), + /** + * SV_INT16 = 1; + */ + SV_INT16(1), + /** + * SV_INT32 = 2; + */ + SV_INT32(2), + 
UNRECOGNIZED(-1), + ; + + /** + * SV_NONE = 0; + */ + public static final int SV_NONE_VALUE = 0; + /** + * SV_INT16 = 1; + */ + public static final int SV_INT16_VALUE = 1; + /** + * SV_INT32 = 2; + */ + public static final int SV_INT32_VALUE = 2; + + + public final int getNumber() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalArgumentException( + "Can't get the number of an unknown enum value."); + } + return value; + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static SelectionVectorType valueOf(int value) { + return forNumber(value); + } + + /** + * @param value The numeric wire value of the corresponding enum entry. + * @return The enum associated with the given numeric wire value. + */ + public static SelectionVectorType forNumber(int value) { + switch (value) { + case 0: return SV_NONE; + case 1: return SV_INT16; + case 2: return SV_INT32; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static final com.google.protobuf.Internal.EnumLiteMap< + SelectionVectorType> internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SelectionVectorType findValueByNumber(int number) { + return SelectionVectorType.forNumber(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } + return getDescriptor().getValues().get(ordinal()); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + 
return org.apache.arrow.gandiva.ipc.GandivaTypes.getDescriptor().getEnumTypes().get(4); + } + + private static final SelectionVectorType[] VALUES = values(); + + public static SelectionVectorType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + if (desc.getIndex() == -1) { + return UNRECOGNIZED; + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private SelectionVectorType(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:gandiva.types.SelectionVectorType) + } + + public interface ExtGandivaTypeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.ExtGandivaType) + com.google.protobuf.MessageOrBuilder { + + /** + * optional .gandiva.types.GandivaType type = 1; + * @return Whether the type field is set. + */ + boolean hasType(); + /** + * optional .gandiva.types.GandivaType type = 1; + * @return The enum numeric value on the wire for type. + */ + int getTypeValue(); + /** + * optional .gandiva.types.GandivaType type = 1; + * @return The type. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaType getType(); + + /** + *
+     * used by FIXED_SIZE_BINARY
+     * 
+ * + * optional uint32 width = 2; + * @return Whether the width field is set. + */ + boolean hasWidth(); + /** + *
+     * used by FIXED_SIZE_BINARY
+     * 
+ * + * optional uint32 width = 2; + * @return The width. + */ + int getWidth(); + + /** + *
+     * used by DECIMAL
+     * 
+ * + * optional int32 precision = 3; + * @return Whether the precision field is set. + */ + boolean hasPrecision(); + /** + *
+     * used by DECIMAL
+     * 
+ * + * optional int32 precision = 3; + * @return The precision. + */ + int getPrecision(); + + /** + *
+     * used by DECIMAL
+     * 
+ * + * optional int32 scale = 4; + * @return Whether the scale field is set. + */ + boolean hasScale(); + /** + *
+     * used by DECIMAL
+     * 
+ * + * optional int32 scale = 4; + * @return The scale. + */ + int getScale(); + + /** + *
+     * used by DATE32/DATE64
+     * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @return Whether the dateUnit field is set. + */ + boolean hasDateUnit(); + /** + *
+     * used by DATE32/DATE64
+     * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @return The enum numeric value on the wire for dateUnit. + */ + int getDateUnitValue(); + /** + *
+     * used by DATE32/DATE64
+     * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @return The dateUnit. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.DateUnit getDateUnit(); + + /** + *
+     * used by TIME32/TIME64
+     * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @return Whether the timeUnit field is set. + */ + boolean hasTimeUnit(); + /** + *
+     * used by TIME32/TIME64
+     * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @return The enum numeric value on the wire for timeUnit. + */ + int getTimeUnitValue(); + /** + *
+     * used by TIME32/TIME64
+     * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @return The timeUnit. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TimeUnit getTimeUnit(); + + /** + *
+     * used by TIMESTAMP
+     * 
+ * + * optional string timeZone = 7; + * @return Whether the timeZone field is set. + */ + boolean hasTimeZone(); + /** + *
+     * used by TIMESTAMP
+     * 
+ * + * optional string timeZone = 7; + * @return The timeZone. + */ + java.lang.String getTimeZone(); + /** + *
+     * used by TIMESTAMP
+     * 
+ * + * optional string timeZone = 7; + * @return The bytes for timeZone. + */ + com.google.protobuf.ByteString + getTimeZoneBytes(); + + /** + *
+     * used by INTERVAL
+     * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @return Whether the intervalType field is set. + */ + boolean hasIntervalType(); + /** + *
+     * used by INTERVAL
+     * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @return The enum numeric value on the wire for intervalType. + */ + int getIntervalTypeValue(); + /** + *
+     * used by INTERVAL
+     * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @return The intervalType. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.IntervalType getIntervalType(); + } + /** + * Protobuf type {@code gandiva.types.ExtGandivaType} + */ + public static final class ExtGandivaType extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.ExtGandivaType) + ExtGandivaTypeOrBuilder { + private static final long serialVersionUID = 0L; + // Use ExtGandivaType.newBuilder() to construct. + private ExtGandivaType(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ExtGandivaType() { + type_ = 0; + dateUnit_ = 0; + timeUnit_ = 0; + timeZone_ = ""; + intervalType_ = 0; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ExtGandivaType(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExtGandivaType_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExtGandivaType_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.class, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder.class); + } + + private int bitField0_; + public static final int TYPE_FIELD_NUMBER = 1; + private int type_ = 0; + /** + * optional .gandiva.types.GandivaType type = 1; + * @return Whether the type field is set. + */ + @java.lang.Override public boolean hasType() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.GandivaType type = 1; + * @return The enum numeric value on the wire for type. 
+ */ + @java.lang.Override public int getTypeValue() { + return type_; + } + /** + * optional .gandiva.types.GandivaType type = 1; + * @return The type. + */ + @java.lang.Override public org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaType getType() { + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaType result = org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaType.forNumber(type_); + return result == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaType.UNRECOGNIZED : result; + } + + public static final int WIDTH_FIELD_NUMBER = 2; + private int width_ = 0; + /** + *
+     * used by FIXED_SIZE_BINARY
+     * 
+ * + * optional uint32 width = 2; + * @return Whether the width field is set. + */ + @java.lang.Override + public boolean hasWidth() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+     * used by FIXED_SIZE_BINARY
+     * 
+ * + * optional uint32 width = 2; + * @return The width. + */ + @java.lang.Override + public int getWidth() { + return width_; + } + + public static final int PRECISION_FIELD_NUMBER = 3; + private int precision_ = 0; + /** + *
+     * used by DECIMAL
+     * 
+ * + * optional int32 precision = 3; + * @return Whether the precision field is set. + */ + @java.lang.Override + public boolean hasPrecision() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + *
+     * used by DECIMAL
+     * 
+ * + * optional int32 precision = 3; + * @return The precision. + */ + @java.lang.Override + public int getPrecision() { + return precision_; + } + + public static final int SCALE_FIELD_NUMBER = 4; + private int scale_ = 0; + /** + *
+     * used by DECIMAL
+     * 
+ * + * optional int32 scale = 4; + * @return Whether the scale field is set. + */ + @java.lang.Override + public boolean hasScale() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + *
+     * used by DECIMAL
+     * 
+ * + * optional int32 scale = 4; + * @return The scale. + */ + @java.lang.Override + public int getScale() { + return scale_; + } + + public static final int DATEUNIT_FIELD_NUMBER = 5; + private int dateUnit_ = 0; + /** + *
+     * used by DATE32/DATE64
+     * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @return Whether the dateUnit field is set. + */ + @java.lang.Override public boolean hasDateUnit() { + return ((bitField0_ & 0x00000010) != 0); + } + /** + *
+     * used by DATE32/DATE64
+     * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @return The enum numeric value on the wire for dateUnit. + */ + @java.lang.Override public int getDateUnitValue() { + return dateUnit_; + } + /** + *
+     * used by DATE32/DATE64
+     * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @return The dateUnit. + */ + @java.lang.Override public org.apache.arrow.gandiva.ipc.GandivaTypes.DateUnit getDateUnit() { + org.apache.arrow.gandiva.ipc.GandivaTypes.DateUnit result = org.apache.arrow.gandiva.ipc.GandivaTypes.DateUnit.forNumber(dateUnit_); + return result == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DateUnit.UNRECOGNIZED : result; + } + + public static final int TIMEUNIT_FIELD_NUMBER = 6; + private int timeUnit_ = 0; + /** + *
+     * used by TIME32/TIME64
+     * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @return Whether the timeUnit field is set. + */ + @java.lang.Override public boolean hasTimeUnit() { + return ((bitField0_ & 0x00000020) != 0); + } + /** + *
+     * used by TIME32/TIME64
+     * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @return The enum numeric value on the wire for timeUnit. + */ + @java.lang.Override public int getTimeUnitValue() { + return timeUnit_; + } + /** + *
+     * used by TIME32/TIME64
+     * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @return The timeUnit. + */ + @java.lang.Override public org.apache.arrow.gandiva.ipc.GandivaTypes.TimeUnit getTimeUnit() { + org.apache.arrow.gandiva.ipc.GandivaTypes.TimeUnit result = org.apache.arrow.gandiva.ipc.GandivaTypes.TimeUnit.forNumber(timeUnit_); + return result == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TimeUnit.UNRECOGNIZED : result; + } + + public static final int TIMEZONE_FIELD_NUMBER = 7; + @SuppressWarnings("serial") + private volatile java.lang.Object timeZone_ = ""; + /** + *
+     * used by TIMESTAMP
+     * 
+ * + * optional string timeZone = 7; + * @return Whether the timeZone field is set. + */ + @java.lang.Override + public boolean hasTimeZone() { + return ((bitField0_ & 0x00000040) != 0); + } + /** + *
+     * used by TIMESTAMP
+     * 
+ * + * optional string timeZone = 7; + * @return The timeZone. + */ + @java.lang.Override + public java.lang.String getTimeZone() { + java.lang.Object ref = timeZone_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + timeZone_ = s; + return s; + } + } + /** + *
+     * used by TIMESTAMP
+     * 
+ * + * optional string timeZone = 7; + * @return The bytes for timeZone. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getTimeZoneBytes() { + java.lang.Object ref = timeZone_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + timeZone_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int INTERVALTYPE_FIELD_NUMBER = 8; + private int intervalType_ = 0; + /** + *
+     * used by INTERVAL
+     * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @return Whether the intervalType field is set. + */ + @java.lang.Override public boolean hasIntervalType() { + return ((bitField0_ & 0x00000080) != 0); + } + /** + *
+     * used by INTERVAL
+     * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @return The enum numeric value on the wire for intervalType. + */ + @java.lang.Override public int getIntervalTypeValue() { + return intervalType_; + } + /** + *
+     * used by INTERVAL
+     * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @return The intervalType. + */ + @java.lang.Override public org.apache.arrow.gandiva.ipc.GandivaTypes.IntervalType getIntervalType() { + org.apache.arrow.gandiva.ipc.GandivaTypes.IntervalType result = org.apache.arrow.gandiva.ipc.GandivaTypes.IntervalType.forNumber(intervalType_); + return result == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.IntervalType.UNRECOGNIZED : result; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeEnum(1, type_); + } + if (((bitField0_ & 0x00000002) != 0)) { + output.writeUInt32(2, width_); + } + if (((bitField0_ & 0x00000004) != 0)) { + output.writeInt32(3, precision_); + } + if (((bitField0_ & 0x00000008) != 0)) { + output.writeInt32(4, scale_); + } + if (((bitField0_ & 0x00000010) != 0)) { + output.writeEnum(5, dateUnit_); + } + if (((bitField0_ & 0x00000020) != 0)) { + output.writeEnum(6, timeUnit_); + } + if (((bitField0_ & 0x00000040) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 7, timeZone_); + } + if (((bitField0_ & 0x00000080) != 0)) { + output.writeEnum(8, intervalType_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, type_); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, width_); + } + if (((bitField0_ & 
0x00000004) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, precision_); + } + if (((bitField0_ & 0x00000008) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(4, scale_); + } + if (((bitField0_ & 0x00000010) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(5, dateUnit_); + } + if (((bitField0_ & 0x00000020) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(6, timeUnit_); + } + if (((bitField0_ & 0x00000040) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, timeZone_); + } + if (((bitField0_ & 0x00000080) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(8, intervalType_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType other = (org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType) obj; + + if (hasType() != other.hasType()) return false; + if (hasType()) { + if (type_ != other.type_) return false; + } + if (hasWidth() != other.hasWidth()) return false; + if (hasWidth()) { + if (getWidth() + != other.getWidth()) return false; + } + if (hasPrecision() != other.hasPrecision()) return false; + if (hasPrecision()) { + if (getPrecision() + != other.getPrecision()) return false; + } + if (hasScale() != other.hasScale()) return false; + if (hasScale()) { + if (getScale() + != other.getScale()) return false; + } + if (hasDateUnit() != other.hasDateUnit()) return false; + if (hasDateUnit()) { + if (dateUnit_ != other.dateUnit_) return false; + } + if (hasTimeUnit() != other.hasTimeUnit()) return false; + if (hasTimeUnit()) { + if (timeUnit_ != other.timeUnit_) return 
false; + } + if (hasTimeZone() != other.hasTimeZone()) return false; + if (hasTimeZone()) { + if (!getTimeZone() + .equals(other.getTimeZone())) return false; + } + if (hasIntervalType() != other.hasIntervalType()) return false; + if (hasIntervalType()) { + if (intervalType_ != other.intervalType_) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasType()) { + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + type_; + } + if (hasWidth()) { + hash = (37 * hash) + WIDTH_FIELD_NUMBER; + hash = (53 * hash) + getWidth(); + } + if (hasPrecision()) { + hash = (37 * hash) + PRECISION_FIELD_NUMBER; + hash = (53 * hash) + getPrecision(); + } + if (hasScale()) { + hash = (37 * hash) + SCALE_FIELD_NUMBER; + hash = (53 * hash) + getScale(); + } + if (hasDateUnit()) { + hash = (37 * hash) + DATEUNIT_FIELD_NUMBER; + hash = (53 * hash) + dateUnit_; + } + if (hasTimeUnit()) { + hash = (37 * hash) + TIMEUNIT_FIELD_NUMBER; + hash = (53 * hash) + timeUnit_; + } + if (hasTimeZone()) { + hash = (37 * hash) + TIMEZONE_FIELD_NUMBER; + hash = (53 * hash) + getTimeZone().hashCode(); + } + if (hasIntervalType()) { + hash = (37 * hash) + INTERVALTYPE_FIELD_NUMBER; + hash = (53 * hash) + intervalType_; + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType 
parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.ExtGandivaType} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.ExtGandivaType) + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExtGandivaType_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExtGandivaType_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.class, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + type_ = 0; + width_ = 0; + precision_ = 0; + scale_ = 0; + dateUnit_ = 0; + timeUnit_ = 0; + timeZone_ = ""; + intervalType_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExtGandivaType_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getDefaultInstanceForType() { + return 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType result = new org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.type_ = type_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.width_ = width_; + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.precision_ = precision_; + to_bitField0_ |= 0x00000004; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.scale_ = scale_; + to_bitField0_ |= 0x00000008; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.dateUnit_ = dateUnit_; + to_bitField0_ |= 0x00000010; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.timeUnit_ = timeUnit_; + to_bitField0_ |= 0x00000020; + } + if (((from_bitField0_ & 0x00000040) != 0)) { + result.timeZone_ = timeZone_; + to_bitField0_ |= 0x00000040; + } + if (((from_bitField0_ & 0x00000080) != 0)) { + result.intervalType_ = intervalType_; + to_bitField0_ |= 0x00000080; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType) { + return 
mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance()) return this; + if (other.hasType()) { + setType(other.getType()); + } + if (other.hasWidth()) { + setWidth(other.getWidth()); + } + if (other.hasPrecision()) { + setPrecision(other.getPrecision()); + } + if (other.hasScale()) { + setScale(other.getScale()); + } + if (other.hasDateUnit()) { + setDateUnit(other.getDateUnit()); + } + if (other.hasTimeUnit()) { + setTimeUnit(other.getTimeUnit()); + } + if (other.hasTimeZone()) { + timeZone_ = other.timeZone_; + bitField0_ |= 0x00000040; + onChanged(); + } + if (other.hasIntervalType()) { + setIntervalType(other.getIntervalType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + type_ = input.readEnum(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 16: { + width_ = input.readUInt32(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 24: { + precision_ = input.readInt32(); + bitField0_ |= 0x00000004; + break; + } // case 24 + case 32: { + scale_ = input.readInt32(); + bitField0_ |= 0x00000008; + break; + } // case 32 + case 40: { + dateUnit_ = input.readEnum(); + bitField0_ |= 0x00000010; + break; + } // case 40 + case 48: { + timeUnit_ = 
input.readEnum(); + bitField0_ |= 0x00000020; + break; + } // case 48 + case 58: { + timeZone_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000040; + break; + } // case 58 + case 64: { + intervalType_ = input.readEnum(); + bitField0_ |= 0x00000080; + break; + } // case 64 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int type_ = 0; + /** + * optional .gandiva.types.GandivaType type = 1; + * @return Whether the type field is set. + */ + @java.lang.Override public boolean hasType() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.GandivaType type = 1; + * @return The enum numeric value on the wire for type. + */ + @java.lang.Override public int getTypeValue() { + return type_; + } + /** + * optional .gandiva.types.GandivaType type = 1; + * @param value The enum numeric value on the wire for type to set. + * @return This builder for chaining. + */ + public Builder setTypeValue(int value) { + type_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.GandivaType type = 1; + * @return The type. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaType getType() { + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaType result = org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaType.forNumber(type_); + return result == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaType.UNRECOGNIZED : result; + } + /** + * optional .gandiva.types.GandivaType type = 1; + * @param value The type to set. + * @return This builder for chaining. 
+ */ + public Builder setType(org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + type_ = value.getNumber(); + onChanged(); + return this; + } + /** + * optional .gandiva.types.GandivaType type = 1; + * @return This builder for chaining. + */ + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000001); + type_ = 0; + onChanged(); + return this; + } + + private int width_ ; + /** + *
+       * used by FIXED_SIZE_BINARY
+       * 
+ * + * optional uint32 width = 2; + * @return Whether the width field is set. + */ + @java.lang.Override + public boolean hasWidth() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + *
+       * used by FIXED_SIZE_BINARY
+       * 
+ * + * optional uint32 width = 2; + * @return The width. + */ + @java.lang.Override + public int getWidth() { + return width_; + } + /** + *
+       * used by FIXED_SIZE_BINARY
+       * 
+ * + * optional uint32 width = 2; + * @param value The width to set. + * @return This builder for chaining. + */ + public Builder setWidth(int value) { + + width_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + *
+       * used by FIXED_SIZE_BINARY
+       * 
+ * + * optional uint32 width = 2; + * @return This builder for chaining. + */ + public Builder clearWidth() { + bitField0_ = (bitField0_ & ~0x00000002); + width_ = 0; + onChanged(); + return this; + } + + private int precision_ ; + /** + *
+       * used by DECIMAL
+       * 
+ * + * optional int32 precision = 3; + * @return Whether the precision field is set. + */ + @java.lang.Override + public boolean hasPrecision() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + *
+       * used by DECIMAL
+       * 
+ * + * optional int32 precision = 3; + * @return The precision. + */ + @java.lang.Override + public int getPrecision() { + return precision_; + } + /** + *
+       * used by DECIMAL
+       * 
+ * + * optional int32 precision = 3; + * @param value The precision to set. + * @return This builder for chaining. + */ + public Builder setPrecision(int value) { + + precision_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * used by DECIMAL
+       * 
+ * + * optional int32 precision = 3; + * @return This builder for chaining. + */ + public Builder clearPrecision() { + bitField0_ = (bitField0_ & ~0x00000004); + precision_ = 0; + onChanged(); + return this; + } + + private int scale_ ; + /** + *
+       * used by DECIMAL
+       * 
+ * + * optional int32 scale = 4; + * @return Whether the scale field is set. + */ + @java.lang.Override + public boolean hasScale() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + *
+       * used by DECIMAL
+       * 
+ * + * optional int32 scale = 4; + * @return The scale. + */ + @java.lang.Override + public int getScale() { + return scale_; + } + /** + *
+       * used by DECIMAL
+       * 
+ * + * optional int32 scale = 4; + * @param value The scale to set. + * @return This builder for chaining. + */ + public Builder setScale(int value) { + + scale_ = value; + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + *
+       * used by DECIMAL
+       * 
+ * + * optional int32 scale = 4; + * @return This builder for chaining. + */ + public Builder clearScale() { + bitField0_ = (bitField0_ & ~0x00000008); + scale_ = 0; + onChanged(); + return this; + } + + private int dateUnit_ = 0; + /** + *
+       * used by DATE32/DATE64
+       * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @return Whether the dateUnit field is set. + */ + @java.lang.Override public boolean hasDateUnit() { + return ((bitField0_ & 0x00000010) != 0); + } + /** + *
+       * used by DATE32/DATE64
+       * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @return The enum numeric value on the wire for dateUnit. + */ + @java.lang.Override public int getDateUnitValue() { + return dateUnit_; + } + /** + *
+       * used by DATE32/DATE64
+       * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @param value The enum numeric value on the wire for dateUnit to set. + * @return This builder for chaining. + */ + public Builder setDateUnitValue(int value) { + dateUnit_ = value; + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + *
+       * used by DATE32/DATE64
+       * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @return The dateUnit. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DateUnit getDateUnit() { + org.apache.arrow.gandiva.ipc.GandivaTypes.DateUnit result = org.apache.arrow.gandiva.ipc.GandivaTypes.DateUnit.forNumber(dateUnit_); + return result == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DateUnit.UNRECOGNIZED : result; + } + /** + *
+       * used by DATE32/DATE64
+       * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @param value The dateUnit to set. + * @return This builder for chaining. + */ + public Builder setDateUnit(org.apache.arrow.gandiva.ipc.GandivaTypes.DateUnit value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + dateUnit_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+       * used by DATE32/DATE64
+       * 
+ * + * optional .gandiva.types.DateUnit dateUnit = 5; + * @return This builder for chaining. + */ + public Builder clearDateUnit() { + bitField0_ = (bitField0_ & ~0x00000010); + dateUnit_ = 0; + onChanged(); + return this; + } + + private int timeUnit_ = 0; + /** + *
+       * used by TIME32/TIME64
+       * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @return Whether the timeUnit field is set. + */ + @java.lang.Override public boolean hasTimeUnit() { + return ((bitField0_ & 0x00000020) != 0); + } + /** + *
+       * used by TIME32/TIME64
+       * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @return The enum numeric value on the wire for timeUnit. + */ + @java.lang.Override public int getTimeUnitValue() { + return timeUnit_; + } + /** + *
+       * used by TIME32/TIME64
+       * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @param value The enum numeric value on the wire for timeUnit to set. + * @return This builder for chaining. + */ + public Builder setTimeUnitValue(int value) { + timeUnit_ = value; + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + *
+       * used by TIME32/TIME64
+       * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @return The timeUnit. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TimeUnit getTimeUnit() { + org.apache.arrow.gandiva.ipc.GandivaTypes.TimeUnit result = org.apache.arrow.gandiva.ipc.GandivaTypes.TimeUnit.forNumber(timeUnit_); + return result == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TimeUnit.UNRECOGNIZED : result; + } + /** + *
+       * used by TIME32/TIME64
+       * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @param value The timeUnit to set. + * @return This builder for chaining. + */ + public Builder setTimeUnit(org.apache.arrow.gandiva.ipc.GandivaTypes.TimeUnit value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + timeUnit_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+       * used by TIME32/TIME64
+       * 
+ * + * optional .gandiva.types.TimeUnit timeUnit = 6; + * @return This builder for chaining. + */ + public Builder clearTimeUnit() { + bitField0_ = (bitField0_ & ~0x00000020); + timeUnit_ = 0; + onChanged(); + return this; + } + + private java.lang.Object timeZone_ = ""; + /** + *
+       * used by TIMESTAMP
+       * 
+ * + * optional string timeZone = 7; + * @return Whether the timeZone field is set. + */ + public boolean hasTimeZone() { + return ((bitField0_ & 0x00000040) != 0); + } + /** + *
+       * used by TIMESTAMP
+       * 
+ * + * optional string timeZone = 7; + * @return The timeZone. + */ + public java.lang.String getTimeZone() { + java.lang.Object ref = timeZone_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + timeZone_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * used by TIMESTAMP
+       * 
+ * + * optional string timeZone = 7; + * @return The bytes for timeZone. + */ + public com.google.protobuf.ByteString + getTimeZoneBytes() { + java.lang.Object ref = timeZone_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + timeZone_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * used by TIMESTAMP
+       * 
+ * + * optional string timeZone = 7; + * @param value The timeZone to set. + * @return This builder for chaining. + */ + public Builder setTimeZone( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + timeZone_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + *
+       * used by TIMESTAMP
+       * 
+ * + * optional string timeZone = 7; + * @return This builder for chaining. + */ + public Builder clearTimeZone() { + timeZone_ = getDefaultInstance().getTimeZone(); + bitField0_ = (bitField0_ & ~0x00000040); + onChanged(); + return this; + } + /** + *
+       * used by TIMESTAMP
+       * 
+ * + * optional string timeZone = 7; + * @param value The bytes for timeZone to set. + * @return This builder for chaining. + */ + public Builder setTimeZoneBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + timeZone_ = value; + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + + private int intervalType_ = 0; + /** + *
+       * used by INTERVAL
+       * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @return Whether the intervalType field is set. + */ + @java.lang.Override public boolean hasIntervalType() { + return ((bitField0_ & 0x00000080) != 0); + } + /** + *
+       * used by INTERVAL
+       * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @return The enum numeric value on the wire for intervalType. + */ + @java.lang.Override public int getIntervalTypeValue() { + return intervalType_; + } + /** + *
+       * used by INTERVAL
+       * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @param value The enum numeric value on the wire for intervalType to set. + * @return This builder for chaining. + */ + public Builder setIntervalTypeValue(int value) { + intervalType_ = value; + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + *
+       * used by INTERVAL
+       * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @return The intervalType. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntervalType getIntervalType() { + org.apache.arrow.gandiva.ipc.GandivaTypes.IntervalType result = org.apache.arrow.gandiva.ipc.GandivaTypes.IntervalType.forNumber(intervalType_); + return result == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.IntervalType.UNRECOGNIZED : result; + } + /** + *
+       * used by INTERVAL
+       * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @param value The intervalType to set. + * @return This builder for chaining. + */ + public Builder setIntervalType(org.apache.arrow.gandiva.ipc.GandivaTypes.IntervalType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000080; + intervalType_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+       * used by INTERVAL
+       * 
+ * + * optional .gandiva.types.IntervalType intervalType = 8; + * @return This builder for chaining. + */ + public Builder clearIntervalType() { + bitField0_ = (bitField0_ & ~0x00000080); + intervalType_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.ExtGandivaType) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.ExtGandivaType) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ExtGandivaType parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + 
public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface FieldOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.Field) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * name of the field
+     * 
+ * + * optional string name = 1; + * @return Whether the name field is set. + */ + boolean hasName(); + /** + *
+     * name of the field
+     * 
+ * + * optional string name = 1; + * @return The name. + */ + java.lang.String getName(); + /** + *
+     * name of the field
+     * 
+ * + * optional string name = 1; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + * optional .gandiva.types.ExtGandivaType type = 2; + * @return Whether the type field is set. + */ + boolean hasType(); + /** + * optional .gandiva.types.ExtGandivaType type = 2; + * @return The type. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getType(); + /** + * optional .gandiva.types.ExtGandivaType type = 2; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getTypeOrBuilder(); + + /** + * optional bool nullable = 3; + * @return Whether the nullable field is set. + */ + boolean hasNullable(); + /** + * optional bool nullable = 3; + * @return The nullable. + */ + boolean getNullable(); + + /** + *
+     * for complex data types like structs, unions
+     * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + java.util.List + getChildrenList(); + /** + *
+     * for complex data types like structs, unions
+     * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.Field getChildren(int index); + /** + *
+     * for complex data types like structs, unions
+     * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + int getChildrenCount(); + /** + *
+     * for complex data types like structs, unions
+     * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + java.util.List + getChildrenOrBuilderList(); + /** + *
+     * for complex data types like structs, unions
+     * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getChildrenOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.Field} + */ + public static final class Field extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.Field) + FieldOrBuilder { + private static final long serialVersionUID = 0L; + // Use Field.newBuilder() to construct. + private Field(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Field() { + name_ = ""; + children_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Field(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Field_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Field_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.class, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder.class); + } + + private int bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + *
+     * name of the field
+     * 
+ * + * optional string name = 1; + * @return Whether the name field is set. + */ + @java.lang.Override + public boolean hasName() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+     * name of the field
+     * 
+ * + * optional string name = 1; + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+     * name of the field
+     * 
+ * + * optional string name = 1; + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int TYPE_FIELD_NUMBER = 2; + private org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType type_; + /** + * optional .gandiva.types.ExtGandivaType type = 2; + * @return Whether the type field is set. + */ + @java.lang.Override + public boolean hasType() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.ExtGandivaType type = 2; + * @return The type. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getType() { + return type_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : type_; + } + /** + * optional .gandiva.types.ExtGandivaType type = 2; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getTypeOrBuilder() { + return type_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : type_; + } + + public static final int NULLABLE_FIELD_NUMBER = 3; + private boolean nullable_ = false; + /** + * optional bool nullable = 3; + * @return Whether the nullable field is set. + */ + @java.lang.Override + public boolean hasNullable() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * optional bool nullable = 3; + * @return The nullable. + */ + @java.lang.Override + public boolean getNullable() { + return nullable_; + } + + public static final int CHILDREN_FIELD_NUMBER = 4; + @SuppressWarnings("serial") + private java.util.List children_; + /** + *
+     * for complex data types like structs, unions
+     * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + @java.lang.Override + public java.util.List getChildrenList() { + return children_; + } + /** + *
+     * for complex data types like structs, unions
+     * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + @java.lang.Override + public java.util.List + getChildrenOrBuilderList() { + return children_; + } + /** + *
+     * for complex data types like structs, unions
+     * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + @java.lang.Override + public int getChildrenCount() { + return children_.size(); + } + /** + *
+     * for complex data types like structs, unions
+     * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field getChildren(int index) { + return children_.get(index); + } + /** + *
+     * for complex data types like structs, unions
+     * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getChildrenOrBuilder( + int index) { + return children_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (((bitField0_ & 0x00000002) != 0)) { + output.writeMessage(2, getType()); + } + if (((bitField0_ & 0x00000004) != 0)) { + output.writeBool(3, nullable_); + } + for (int i = 0; i < children_.size(); i++) { + output.writeMessage(4, children_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getType()); + } + if (((bitField0_ & 0x00000004) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, nullable_); + } + for (int i = 0; i < children_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, children_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.Field)) { + return 
super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.Field other = (org.apache.arrow.gandiva.ipc.GandivaTypes.Field) obj; + + if (hasName() != other.hasName()) return false; + if (hasName()) { + if (!getName() + .equals(other.getName())) return false; + } + if (hasType() != other.hasType()) return false; + if (hasType()) { + if (!getType() + .equals(other.getType())) return false; + } + if (hasNullable() != other.hasNullable()) return false; + if (hasNullable()) { + if (getNullable() + != other.getNullable()) return false; + } + if (!getChildrenList() + .equals(other.getChildrenList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasType()) { + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + getType().hashCode(); + } + if (hasNullable()) { + hash = (37 * hash) + NULLABLE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getNullable()); + } + if (getChildrenCount() > 0) { + hash = (37 * hash) + CHILDREN_FIELD_NUMBER; + hash = (53 * hash) + getChildrenList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + 
public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.Field prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.Field} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.Field) + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Field_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Field_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.class, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.Field.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getTypeFieldBuilder(); + getChildrenFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + type_ = null; + if (typeBuilder_ != null) { + typeBuilder_.dispose(); + typeBuilder_ = null; + } + nullable_ = false; + if (childrenBuilder_ == null) { + children_ = java.util.Collections.emptyList(); + } else { + children_ = null; + childrenBuilder_.clear(); + } + bitField0_ = (bitField0_ & 
~0x00000008); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Field_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.Field result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.Field result = new org.apache.arrow.gandiva.ipc.GandivaTypes.Field(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.Field result) { + if (childrenBuilder_ == null) { + if (((bitField0_ & 0x00000008) != 0)) { + children_ = java.util.Collections.unmodifiableList(children_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.children_ = children_; + } else { + result.children_ = childrenBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.Field result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.type_ = typeBuilder_ == null + ? 
type_ + : typeBuilder_.build(); + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.nullable_ = nullable_; + to_bitField0_ |= 0x00000004; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.Field) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.Field)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.Field other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance()) return this; + if (other.hasName()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasType()) { + mergeType(other.getType()); + } + if (other.hasNullable()) { + setNullable(other.getNullable()); + } + if (childrenBuilder_ == null) { + if (!other.children_.isEmpty()) { + if (children_.isEmpty()) { + children_ = other.children_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureChildrenIsMutable(); + children_.addAll(other.children_); + } + onChanged(); + } + } else { + if (!other.children_.isEmpty()) { + if (childrenBuilder_.isEmpty()) { + childrenBuilder_.dispose(); + childrenBuilder_ = null; + children_ = other.children_; + bitField0_ = (bitField0_ & ~0x00000008); + childrenBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getChildrenFieldBuilder() : null; + } else { + childrenBuilder_.addAllMessages(other.children_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + input.readMessage( + getTypeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 24: { + nullable_ = input.readBool(); + bitField0_ |= 0x00000004; + break; + } // case 24 + case 34: { + org.apache.arrow.gandiva.ipc.GandivaTypes.Field m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.parser(), + extensionRegistry); + if (childrenBuilder_ == null) { + ensureChildrenIsMutable(); + children_.add(m); + } else { + childrenBuilder_.addMessage(m); + } + break; + } // case 34 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + *
+       * name of the field
+       * 
+ * + * optional string name = 1; + * @return Whether the name field is set. + */ + public boolean hasName() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + *
+       * name of the field
+       * 
+ * + * optional string name = 1; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * name of the field
+       * 
+ * + * optional string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * name of the field
+       * 
+ * + * optional string name = 1; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + *
+       * name of the field
+       * 
+ * + * optional string name = 1; + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       * name of the field
+       * 
+ * + * optional string name = 1; + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType type_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> typeBuilder_; + /** + * optional .gandiva.types.ExtGandivaType type = 2; + * @return Whether the type field is set. + */ + public boolean hasType() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.ExtGandivaType type = 2; + * @return The type. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getType() { + if (typeBuilder_ == null) { + return type_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : type_; + } else { + return typeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.ExtGandivaType type = 2; + */ + public Builder setType(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (typeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + type_ = value; + } else { + typeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType type = 2; + */ + public Builder setType( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder builderForValue) { + if (typeBuilder_ == null) { + type_ = builderForValue.build(); + } else { + typeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType type = 2; + */ + public Builder mergeType(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (typeBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) && + type_ != null && + type_ != org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance()) { + getTypeBuilder().mergeFrom(value); + } else { + type_ = value; + } + } else { + typeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType type = 2; + */ + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000002); + type_ = null; + if (typeBuilder_ != null) { + typeBuilder_.dispose(); + typeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType type = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder getTypeBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getTypeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.ExtGandivaType type = 2; + */ + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getTypeOrBuilder() { + if (typeBuilder_ != null) { + return typeBuilder_.getMessageOrBuilder(); + } else { + return type_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : type_; + } + } + /** + * optional .gandiva.types.ExtGandivaType type = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> + getTypeFieldBuilder() { + if (typeBuilder_ == null) { + typeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder>( + getType(), + getParentForChildren(), + isClean()); + type_ = null; + } + return typeBuilder_; + } + + private boolean nullable_ ; + /** + * optional bool nullable = 3; + * @return Whether the nullable field is set. + */ + @java.lang.Override + public boolean hasNullable() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * optional bool nullable = 3; + * @return The nullable. + */ + @java.lang.Override + public boolean getNullable() { + return nullable_; + } + /** + * optional bool nullable = 3; + * @param value The nullable to set. + * @return This builder for chaining. + */ + public Builder setNullable(boolean value) { + + nullable_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * optional bool nullable = 3; + * @return This builder for chaining. 
+ */ + public Builder clearNullable() { + bitField0_ = (bitField0_ & ~0x00000004); + nullable_ = false; + onChanged(); + return this; + } + + private java.util.List children_ = + java.util.Collections.emptyList(); + private void ensureChildrenIsMutable() { + if (!((bitField0_ & 0x00000008) != 0)) { + children_ = new java.util.ArrayList(children_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.Field, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder> childrenBuilder_; + + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public java.util.List getChildrenList() { + if (childrenBuilder_ == null) { + return java.util.Collections.unmodifiableList(children_); + } else { + return childrenBuilder_.getMessageList(); + } + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public int getChildrenCount() { + if (childrenBuilder_ == null) { + return children_.size(); + } else { + return childrenBuilder_.getCount(); + } + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field getChildren(int index) { + if (childrenBuilder_ == null) { + return children_.get(index); + } else { + return childrenBuilder_.getMessage(index); + } + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public Builder setChildren( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.Field value) { + if (childrenBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureChildrenIsMutable(); + children_.set(index, value); + onChanged(); + } else { + childrenBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public Builder setChildren( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder builderForValue) { + if (childrenBuilder_ == null) { + ensureChildrenIsMutable(); + children_.set(index, builderForValue.build()); + onChanged(); + } else { + childrenBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public Builder addChildren(org.apache.arrow.gandiva.ipc.GandivaTypes.Field value) { + if (childrenBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureChildrenIsMutable(); + children_.add(value); + onChanged(); + } else { + childrenBuilder_.addMessage(value); + } + return this; + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public Builder addChildren( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.Field value) { + if (childrenBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureChildrenIsMutable(); + children_.add(index, value); + onChanged(); + } else { + childrenBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public Builder addChildren( + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder builderForValue) { + if (childrenBuilder_ == null) { + ensureChildrenIsMutable(); + children_.add(builderForValue.build()); + onChanged(); + } else { + childrenBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public Builder addChildren( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder builderForValue) { + if (childrenBuilder_ == null) { + ensureChildrenIsMutable(); + children_.add(index, builderForValue.build()); + onChanged(); + } else { + childrenBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public Builder addAllChildren( + java.lang.Iterable values) { + if (childrenBuilder_ == null) { + ensureChildrenIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, children_); + onChanged(); + } else { + childrenBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public Builder clearChildren() { + if (childrenBuilder_ == null) { + children_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + childrenBuilder_.clear(); + } + return this; + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public Builder removeChildren(int index) { + if (childrenBuilder_ == null) { + ensureChildrenIsMutable(); + children_.remove(index); + onChanged(); + } else { + childrenBuilder_.remove(index); + } + return this; + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder getChildrenBuilder( + int index) { + return getChildrenFieldBuilder().getBuilder(index); + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getChildrenOrBuilder( + int index) { + if (childrenBuilder_ == null) { + return children_.get(index); } else { + return childrenBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public java.util.List + getChildrenOrBuilderList() { + if (childrenBuilder_ != null) { + return childrenBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(children_); + } + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder addChildrenBuilder() { + return getChildrenFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance()); + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder addChildrenBuilder( + int index) { + return getChildrenFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance()); + } + /** + *
+       * for complex data types like structs, unions
+       * 
+ * + * repeated .gandiva.types.Field children = 4; + */ + public java.util.List + getChildrenBuilderList() { + return getChildrenFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.Field, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder> + getChildrenFieldBuilder() { + if (childrenBuilder_ == null) { + childrenBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.Field, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder>( + children_, + ((bitField0_ & 0x00000008) != 0), + getParentForChildren(), + isClean()); + children_ = null; + } + return childrenBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.Field) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.Field) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.Field DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.Field(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Field getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Field parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try 
{ + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface FieldNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.FieldNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional .gandiva.types.Field field = 1; + * @return Whether the field field is set. + */ + boolean hasField(); + /** + * optional .gandiva.types.Field field = 1; + * @return The field. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.Field getField(); + /** + * optional .gandiva.types.Field field = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getFieldOrBuilder(); + } + /** + * Protobuf type {@code gandiva.types.FieldNode} + */ + public static final class FieldNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.FieldNode) + FieldNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use FieldNode.newBuilder() to construct. 
+ private FieldNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private FieldNode() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new FieldNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FieldNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FieldNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.Builder.class); + } + + private int bitField0_; + public static final int FIELD_FIELD_NUMBER = 1; + private org.apache.arrow.gandiva.ipc.GandivaTypes.Field field_; + /** + * optional .gandiva.types.Field field = 1; + * @return Whether the field field is set. + */ + @java.lang.Override + public boolean hasField() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.Field field = 1; + * @return The field. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field getField() { + return field_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance() : field_; + } + /** + * optional .gandiva.types.Field field = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getFieldOrBuilder() { + return field_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance() : field_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(1, getField()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getField()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode) obj; + + if (hasField() != other.hasField()) return false; + if (hasField()) { + if (!getField() + .equals(other.getField())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasField()) { + hash = (37 * hash) + FIELD_FIELD_NUMBER; + hash = (53 * hash) + getField().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } 
+ + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.FieldNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.FieldNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FieldNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FieldNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getFieldFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + field_ = null; + if (fieldBuilder_ != null) { + fieldBuilder_.dispose(); + fieldBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FieldNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.field_ = fieldBuilder_ == null + ? 
field_ + : fieldBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.getDefaultInstance()) return this; + if (other.hasField()) { + mergeField(other.getField()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getFieldFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.gandiva.ipc.GandivaTypes.Field field_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.Field, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, 
org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder> fieldBuilder_; + /** + * optional .gandiva.types.Field field = 1; + * @return Whether the field field is set. + */ + public boolean hasField() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.Field field = 1; + * @return The field. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field getField() { + if (fieldBuilder_ == null) { + return field_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance() : field_; + } else { + return fieldBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.Field field = 1; + */ + public Builder setField(org.apache.arrow.gandiva.ipc.GandivaTypes.Field value) { + if (fieldBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + field_ = value; + } else { + fieldBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.Field field = 1; + */ + public Builder setField( + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder builderForValue) { + if (fieldBuilder_ == null) { + field_ = builderForValue.build(); + } else { + fieldBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.Field field = 1; + */ + public Builder mergeField(org.apache.arrow.gandiva.ipc.GandivaTypes.Field value) { + if (fieldBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + field_ != null && + field_ != org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance()) { + getFieldBuilder().mergeFrom(value); + } else { + field_ = value; + } + } else { + fieldBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.Field field = 1; + */ + public Builder clearField() { + bitField0_ = (bitField0_ & ~0x00000001); + field_ = null; + if (fieldBuilder_ != null) { + 
fieldBuilder_.dispose(); + fieldBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.Field field = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder getFieldBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getFieldFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.Field field = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getFieldOrBuilder() { + if (fieldBuilder_ != null) { + return fieldBuilder_.getMessageOrBuilder(); + } else { + return field_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance() : field_; + } + } + /** + * optional .gandiva.types.Field field = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.Field, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder> + getFieldFieldBuilder() { + if (fieldBuilder_ == null) { + fieldBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.Field, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder>( + getField(), + getParentForChildren(), + isClean()); + field_ = null; + } + return fieldBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.FieldNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.FieldNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new 
org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public FieldNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface FunctionNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.FunctionNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional string functionName = 1; + * @return Whether the functionName field is set. + */ + boolean hasFunctionName(); + /** + * optional string functionName = 1; + * @return The functionName. + */ + java.lang.String getFunctionName(); + /** + * optional string functionName = 1; + * @return The bytes for functionName. 
+ */ + com.google.protobuf.ByteString + getFunctionNameBytes(); + + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + java.util.List + getInArgsList(); + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getInArgs(int index); + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + int getInArgsCount(); + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + java.util.List + getInArgsOrBuilderList(); + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getInArgsOrBuilder( + int index); + + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + * @return Whether the returnType field is set. + */ + boolean hasReturnType(); + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + * @return The returnType. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getReturnType(); + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getReturnTypeOrBuilder(); + } + /** + * Protobuf type {@code gandiva.types.FunctionNode} + */ + public static final class FunctionNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.FunctionNode) + FunctionNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use FunctionNode.newBuilder() to construct. 
+ private FunctionNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private FunctionNode() { + functionName_ = ""; + inArgs_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new FunctionNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FunctionNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FunctionNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.Builder.class); + } + + private int bitField0_; + public static final int FUNCTIONNAME_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object functionName_ = ""; + /** + * optional string functionName = 1; + * @return Whether the functionName field is set. + */ + @java.lang.Override + public boolean hasFunctionName() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional string functionName = 1; + * @return The functionName. + */ + @java.lang.Override + public java.lang.String getFunctionName() { + java.lang.Object ref = functionName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + functionName_ = s; + return s; + } + } + /** + * optional string functionName = 1; + * @return The bytes for functionName. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getFunctionNameBytes() { + java.lang.Object ref = functionName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + functionName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int INARGS_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private java.util.List inArgs_; + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + @java.lang.Override + public java.util.List getInArgsList() { + return inArgs_; + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + @java.lang.Override + public java.util.List + getInArgsOrBuilderList() { + return inArgs_; + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + @java.lang.Override + public int getInArgsCount() { + return inArgs_.size(); + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getInArgs(int index) { + return inArgs_.get(index); + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getInArgsOrBuilder( + int index) { + return inArgs_.get(index); + } + + public static final int RETURNTYPE_FIELD_NUMBER = 3; + private org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType returnType_; + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + * @return Whether the returnType field is set. + */ + @java.lang.Override + public boolean hasReturnType() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + * @return The returnType. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getReturnType() { + return returnType_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getReturnTypeOrBuilder() { + return returnType_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, functionName_); + } + for (int i = 0; i < inArgs_.size(); i++) { + output.writeMessage(2, inArgs_.get(i)); + } + if (((bitField0_ & 0x00000002) != 0)) { + output.writeMessage(3, getReturnType()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, functionName_); + } + for (int i = 0; i < inArgs_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, inArgs_.get(i)); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getReturnType()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode) obj; + + if (hasFunctionName() != other.hasFunctionName()) return false; + if (hasFunctionName()) { + if (!getFunctionName() + .equals(other.getFunctionName())) return false; + } + if (!getInArgsList() + .equals(other.getInArgsList())) return false; + if (hasReturnType() != other.hasReturnType()) return false; + if (hasReturnType()) { + if (!getReturnType() + .equals(other.getReturnType())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasFunctionName()) { + hash = (37 * hash) + FUNCTIONNAME_FIELD_NUMBER; + hash = (53 * hash) + getFunctionName().hashCode(); + } + if (getInArgsCount() > 0) { + hash = (37 * hash) + INARGS_FIELD_NUMBER; + hash = (53 * hash) + getInArgsList().hashCode(); + } + if (hasReturnType()) { + hash = (37 * hash) + RETURNTYPE_FIELD_NUMBER; + hash = (53 * hash) + getReturnType().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseFrom( + 
com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.FunctionNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.FunctionNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FunctionNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FunctionNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getInArgsFieldBuilder(); + getReturnTypeFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + functionName_ = ""; + if (inArgsBuilder_ == null) { + inArgs_ = java.util.Collections.emptyList(); + } else { + inArgs_ = null; + inArgsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + returnType_ = null; + if (returnTypeBuilder_ != null) 
{ + returnTypeBuilder_.dispose(); + returnTypeBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FunctionNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode result) { + if (inArgsBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0)) { + inArgs_ = java.util.Collections.unmodifiableList(inArgs_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.inArgs_ = inArgs_; + } else { + result.inArgs_ = inArgsBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.functionName_ = functionName_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.returnType_ = returnTypeBuilder_ == null + ? 
returnType_ + : returnTypeBuilder_.build(); + to_bitField0_ |= 0x00000002; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.getDefaultInstance()) return this; + if (other.hasFunctionName()) { + functionName_ = other.functionName_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (inArgsBuilder_ == null) { + if (!other.inArgs_.isEmpty()) { + if (inArgs_.isEmpty()) { + inArgs_ = other.inArgs_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureInArgsIsMutable(); + inArgs_.addAll(other.inArgs_); + } + onChanged(); + } + } else { + if (!other.inArgs_.isEmpty()) { + if (inArgsBuilder_.isEmpty()) { + inArgsBuilder_.dispose(); + inArgsBuilder_ = null; + inArgs_ = other.inArgs_; + bitField0_ = (bitField0_ & ~0x00000002); + inArgsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getInArgsFieldBuilder() : null; + } else { + inArgsBuilder_.addAllMessages(other.inArgs_); + } + } + } + if (other.hasReturnType()) { + mergeReturnType(other.getReturnType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + functionName_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.parser(), + extensionRegistry); + if (inArgsBuilder_ == null) { + ensureInArgsIsMutable(); + inArgs_.add(m); + } else { + inArgsBuilder_.addMessage(m); + } + break; + } // case 18 + case 26: { + input.readMessage( + getReturnTypeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000004; + break; + } // case 26 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object functionName_ = ""; + /** + * optional string functionName = 1; + * @return Whether the functionName field is set. 
+ */ + public boolean hasFunctionName() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional string functionName = 1; + * @return The functionName. + */ + public java.lang.String getFunctionName() { + java.lang.Object ref = functionName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + functionName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string functionName = 1; + * @return The bytes for functionName. + */ + public com.google.protobuf.ByteString + getFunctionNameBytes() { + java.lang.Object ref = functionName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + functionName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string functionName = 1; + * @param value The functionName to set. + * @return This builder for chaining. + */ + public Builder setFunctionName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + functionName_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional string functionName = 1; + * @return This builder for chaining. + */ + public Builder clearFunctionName() { + functionName_ = getDefaultInstance().getFunctionName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * optional string functionName = 1; + * @param value The bytes for functionName to set. + * @return This builder for chaining. 
+ */ + public Builder setFunctionNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + functionName_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private java.util.List inArgs_ = + java.util.Collections.emptyList(); + private void ensureInArgsIsMutable() { + if (!((bitField0_ & 0x00000002) != 0)) { + inArgs_ = new java.util.ArrayList(inArgs_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> inArgsBuilder_; + + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public java.util.List getInArgsList() { + if (inArgsBuilder_ == null) { + return java.util.Collections.unmodifiableList(inArgs_); + } else { + return inArgsBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public int getInArgsCount() { + if (inArgsBuilder_ == null) { + return inArgs_.size(); + } else { + return inArgsBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getInArgs(int index) { + if (inArgsBuilder_ == null) { + return inArgs_.get(index); + } else { + return inArgsBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public Builder setInArgs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (inArgsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInArgsIsMutable(); + inArgs_.set(index, value); + onChanged(); + } else { + inArgsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public Builder setInArgs( + int index, 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (inArgsBuilder_ == null) { + ensureInArgsIsMutable(); + inArgs_.set(index, builderForValue.build()); + onChanged(); + } else { + inArgsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public Builder addInArgs(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (inArgsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInArgsIsMutable(); + inArgs_.add(value); + onChanged(); + } else { + inArgsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public Builder addInArgs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (inArgsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureInArgsIsMutable(); + inArgs_.add(index, value); + onChanged(); + } else { + inArgsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public Builder addInArgs( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (inArgsBuilder_ == null) { + ensureInArgsIsMutable(); + inArgs_.add(builderForValue.build()); + onChanged(); + } else { + inArgsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public Builder addInArgs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (inArgsBuilder_ == null) { + ensureInArgsIsMutable(); + inArgs_.add(index, builderForValue.build()); + onChanged(); + } else { + inArgsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public Builder addAllInArgs( + java.lang.Iterable values) { + if (inArgsBuilder_ == null) { + 
ensureInArgsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, inArgs_); + onChanged(); + } else { + inArgsBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public Builder clearInArgs() { + if (inArgsBuilder_ == null) { + inArgs_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + inArgsBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public Builder removeInArgs(int index) { + if (inArgsBuilder_ == null) { + ensureInArgsIsMutable(); + inArgs_.remove(index); + onChanged(); + } else { + inArgsBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder getInArgsBuilder( + int index) { + return getInArgsFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getInArgsOrBuilder( + int index) { + if (inArgsBuilder_ == null) { + return inArgs_.get(index); } else { + return inArgsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public java.util.List + getInArgsOrBuilderList() { + if (inArgsBuilder_ != null) { + return inArgsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(inArgs_); + } + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder addInArgsBuilder() { + return getInArgsFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder addInArgsBuilder( + int index) { + return 
getInArgsFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.TreeNode inArgs = 2; + */ + public java.util.List + getInArgsBuilderList() { + return getInArgsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> + getInArgsFieldBuilder() { + if (inArgsBuilder_ == null) { + inArgsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder>( + inArgs_, + ((bitField0_ & 0x00000002) != 0), + getParentForChildren(), + isClean()); + inArgs_ = null; + } + return inArgsBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType returnType_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> returnTypeBuilder_; + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + * @return Whether the returnType field is set. + */ + public boolean hasReturnType() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + * @return The returnType. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getReturnType() { + if (returnTypeBuilder_ == null) { + return returnType_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } else { + return returnTypeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + */ + public Builder setReturnType(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (returnTypeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + returnType_ = value; + } else { + returnTypeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + */ + public Builder setReturnType( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder builderForValue) { + if (returnTypeBuilder_ == null) { + returnType_ = builderForValue.build(); + } else { + returnTypeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + */ + public Builder mergeReturnType(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (returnTypeBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0) && + returnType_ != null && + returnType_ != org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance()) { + getReturnTypeBuilder().mergeFrom(value); + } else { + returnType_ = value; + } + } else { + returnTypeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + */ + public Builder clearReturnType() { + bitField0_ = (bitField0_ & ~0x00000004); + returnType_ = null; + if (returnTypeBuilder_ != null) { + returnTypeBuilder_.dispose(); + returnTypeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder 
getReturnTypeBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getReturnTypeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getReturnTypeOrBuilder() { + if (returnTypeBuilder_ != null) { + return returnTypeBuilder_.getMessageOrBuilder(); + } else { + return returnType_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> + getReturnTypeFieldBuilder() { + if (returnTypeBuilder_ == null) { + returnTypeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder>( + getReturnType(), + getParentForChildren(), + isClean()); + returnType_ = null; + } + return returnTypeBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.FunctionNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.FunctionNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode(); + } + + 
public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public FunctionNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface IfNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.IfNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional .gandiva.types.TreeNode cond = 1; + * @return Whether the cond field is set. + */ + boolean hasCond(); + /** + * optional .gandiva.types.TreeNode cond = 1; + * @return The cond. 
+ */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getCond(); + /** + * optional .gandiva.types.TreeNode cond = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getCondOrBuilder(); + + /** + * optional .gandiva.types.TreeNode thenNode = 2; + * @return Whether the thenNode field is set. + */ + boolean hasThenNode(); + /** + * optional .gandiva.types.TreeNode thenNode = 2; + * @return The thenNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getThenNode(); + /** + * optional .gandiva.types.TreeNode thenNode = 2; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getThenNodeOrBuilder(); + + /** + * optional .gandiva.types.TreeNode elseNode = 3; + * @return Whether the elseNode field is set. + */ + boolean hasElseNode(); + /** + * optional .gandiva.types.TreeNode elseNode = 3; + * @return The elseNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getElseNode(); + /** + * optional .gandiva.types.TreeNode elseNode = 3; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getElseNodeOrBuilder(); + + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + * @return Whether the returnType field is set. + */ + boolean hasReturnType(); + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + * @return The returnType. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getReturnType(); + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getReturnTypeOrBuilder(); + } + /** + * Protobuf type {@code gandiva.types.IfNode} + */ + public static final class IfNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.IfNode) + IfNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use IfNode.newBuilder() to construct. 
+ private IfNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private IfNode() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new IfNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IfNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IfNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.Builder.class); + } + + private int bitField0_; + public static final int COND_FIELD_NUMBER = 1; + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode cond_; + /** + * optional .gandiva.types.TreeNode cond = 1; + * @return Whether the cond field is set. + */ + @java.lang.Override + public boolean hasCond() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.TreeNode cond = 1; + * @return The cond. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getCond() { + return cond_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : cond_; + } + /** + * optional .gandiva.types.TreeNode cond = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getCondOrBuilder() { + return cond_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : cond_; + } + + public static final int THENNODE_FIELD_NUMBER = 2; + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode thenNode_; + /** + * optional .gandiva.types.TreeNode thenNode = 2; + * @return Whether the thenNode field is set. + */ + @java.lang.Override + public boolean hasThenNode() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.TreeNode thenNode = 2; + * @return The thenNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getThenNode() { + return thenNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : thenNode_; + } + /** + * optional .gandiva.types.TreeNode thenNode = 2; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getThenNodeOrBuilder() { + return thenNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : thenNode_; + } + + public static final int ELSENODE_FIELD_NUMBER = 3; + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode elseNode_; + /** + * optional .gandiva.types.TreeNode elseNode = 3; + * @return Whether the elseNode field is set. + */ + @java.lang.Override + public boolean hasElseNode() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * optional .gandiva.types.TreeNode elseNode = 3; + * @return The elseNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getElseNode() { + return elseNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : elseNode_; + } + /** + * optional .gandiva.types.TreeNode elseNode = 3; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getElseNodeOrBuilder() { + return elseNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : elseNode_; + } + + public static final int RETURNTYPE_FIELD_NUMBER = 4; + private org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType returnType_; + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + * @return Whether the returnType field is set. + */ + @java.lang.Override + public boolean hasReturnType() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + * @return The returnType. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getReturnType() { + return returnType_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getReturnTypeOrBuilder() { + return returnType_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(1, getCond()); + } + if (((bitField0_ & 0x00000002) != 0)) { + output.writeMessage(2, getThenNode()); + } + if (((bitField0_ & 0x00000004) != 0)) { + output.writeMessage(3, getElseNode()); + } + if (((bitField0_ & 0x00000008) != 0)) { + output.writeMessage(4, getReturnType()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getCond()); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getThenNode()); + } + if (((bitField0_ & 0x00000004) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getElseNode()); + } + if (((bitField0_ & 0x00000008) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getReturnType()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode) obj; + + if 
(hasCond() != other.hasCond()) return false; + if (hasCond()) { + if (!getCond() + .equals(other.getCond())) return false; + } + if (hasThenNode() != other.hasThenNode()) return false; + if (hasThenNode()) { + if (!getThenNode() + .equals(other.getThenNode())) return false; + } + if (hasElseNode() != other.hasElseNode()) return false; + if (hasElseNode()) { + if (!getElseNode() + .equals(other.getElseNode())) return false; + } + if (hasReturnType() != other.hasReturnType()) return false; + if (hasReturnType()) { + if (!getReturnType() + .equals(other.getReturnType())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasCond()) { + hash = (37 * hash) + COND_FIELD_NUMBER; + hash = (53 * hash) + getCond().hashCode(); + } + if (hasThenNode()) { + hash = (37 * hash) + THENNODE_FIELD_NUMBER; + hash = (53 * hash) + getThenNode().hashCode(); + } + if (hasElseNode()) { + hash = (37 * hash) + ELSENODE_FIELD_NUMBER; + hash = (53 * hash) + getElseNode().hashCode(); + } + if (hasReturnType()) { + hash = (37 * hash) + RETURNTYPE_FIELD_NUMBER; + hash = (53 * hash) + getReturnType().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.IfNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.IfNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IfNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IfNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getCondFieldBuilder(); + getThenNodeFieldBuilder(); + getElseNodeFieldBuilder(); + getReturnTypeFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + cond_ = null; + if (condBuilder_ != null) { + condBuilder_.dispose(); + condBuilder_ = null; + } + thenNode_ = null; + if (thenNodeBuilder_ != null) { + thenNodeBuilder_.dispose(); + thenNodeBuilder_ = null; + } + elseNode_ = null; + if 
(elseNodeBuilder_ != null) { + elseNodeBuilder_.dispose(); + elseNodeBuilder_ = null; + } + returnType_ = null; + if (returnTypeBuilder_ != null) { + returnTypeBuilder_.dispose(); + returnTypeBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IfNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.cond_ = condBuilder_ == null + ? cond_ + : condBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.thenNode_ = thenNodeBuilder_ == null + ? thenNode_ + : thenNodeBuilder_.build(); + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.elseNode_ = elseNodeBuilder_ == null + ? elseNode_ + : elseNodeBuilder_.build(); + to_bitField0_ |= 0x00000004; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.returnType_ = returnTypeBuilder_ == null + ? 
returnType_ + : returnTypeBuilder_.build(); + to_bitField0_ |= 0x00000008; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.getDefaultInstance()) return this; + if (other.hasCond()) { + mergeCond(other.getCond()); + } + if (other.hasThenNode()) { + mergeThenNode(other.getThenNode()); + } + if (other.hasElseNode()) { + mergeElseNode(other.getElseNode()); + } + if (other.hasReturnType()) { + mergeReturnType(other.getReturnType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getCondFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + input.readMessage( + getThenNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + input.readMessage( + getElseNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: { + input.readMessage( + getReturnTypeFieldBuilder().getBuilder(), + 
extensionRegistry); + bitField0_ |= 0x00000008; + break; + } // case 34 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode cond_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> condBuilder_; + /** + * optional .gandiva.types.TreeNode cond = 1; + * @return Whether the cond field is set. + */ + public boolean hasCond() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.TreeNode cond = 1; + * @return The cond. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getCond() { + if (condBuilder_ == null) { + return cond_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : cond_; + } else { + return condBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.TreeNode cond = 1; + */ + public Builder setCond(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (condBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + cond_ = value; + } else { + condBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode cond = 1; + */ + public Builder setCond( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (condBuilder_ == null) { + cond_ = builderForValue.build(); + } else { + condBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode cond = 1; + */ + public Builder mergeCond(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (condBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + cond_ != null && + cond_ != org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()) { + getCondBuilder().mergeFrom(value); + } else { + cond_ = value; + } + } else { + condBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode cond = 1; + */ + public Builder clearCond() { + bitField0_ = (bitField0_ & ~0x00000001); + cond_ = null; + if (condBuilder_ != null) { + condBuilder_.dispose(); + condBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode cond = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder getCondBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getCondFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.TreeNode cond = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder 
getCondOrBuilder() { + if (condBuilder_ != null) { + return condBuilder_.getMessageOrBuilder(); + } else { + return cond_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : cond_; + } + } + /** + * optional .gandiva.types.TreeNode cond = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> + getCondFieldBuilder() { + if (condBuilder_ == null) { + condBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder>( + getCond(), + getParentForChildren(), + isClean()); + cond_ = null; + } + return condBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode thenNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> thenNodeBuilder_; + /** + * optional .gandiva.types.TreeNode thenNode = 2; + * @return Whether the thenNode field is set. + */ + public boolean hasThenNode() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.TreeNode thenNode = 2; + * @return The thenNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getThenNode() { + if (thenNodeBuilder_ == null) { + return thenNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : thenNode_; + } else { + return thenNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.TreeNode thenNode = 2; + */ + public Builder setThenNode(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (thenNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + thenNode_ = value; + } else { + thenNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode thenNode = 2; + */ + public Builder setThenNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (thenNodeBuilder_ == null) { + thenNode_ = builderForValue.build(); + } else { + thenNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode thenNode = 2; + */ + public Builder mergeThenNode(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (thenNodeBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) && + thenNode_ != null && + thenNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()) { + getThenNodeBuilder().mergeFrom(value); + } else { + thenNode_ = value; + } + } else { + thenNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode thenNode = 2; + */ + public Builder clearThenNode() { + bitField0_ = (bitField0_ & ~0x00000002); + thenNode_ = null; + if (thenNodeBuilder_ != null) { + thenNodeBuilder_.dispose(); + thenNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode thenNode = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder getThenNodeBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getThenNodeFieldBuilder().getBuilder(); + } + /** + * optional 
.gandiva.types.TreeNode thenNode = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getThenNodeOrBuilder() { + if (thenNodeBuilder_ != null) { + return thenNodeBuilder_.getMessageOrBuilder(); + } else { + return thenNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : thenNode_; + } + } + /** + * optional .gandiva.types.TreeNode thenNode = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> + getThenNodeFieldBuilder() { + if (thenNodeBuilder_ == null) { + thenNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder>( + getThenNode(), + getParentForChildren(), + isClean()); + thenNode_ = null; + } + return thenNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode elseNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> elseNodeBuilder_; + /** + * optional .gandiva.types.TreeNode elseNode = 3; + * @return Whether the elseNode field is set. + */ + public boolean hasElseNode() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * optional .gandiva.types.TreeNode elseNode = 3; + * @return The elseNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getElseNode() { + if (elseNodeBuilder_ == null) { + return elseNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : elseNode_; + } else { + return elseNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.TreeNode elseNode = 3; + */ + public Builder setElseNode(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (elseNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + elseNode_ = value; + } else { + elseNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode elseNode = 3; + */ + public Builder setElseNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (elseNodeBuilder_ == null) { + elseNode_ = builderForValue.build(); + } else { + elseNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode elseNode = 3; + */ + public Builder mergeElseNode(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (elseNodeBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0) && + elseNode_ != null && + elseNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()) { + getElseNodeBuilder().mergeFrom(value); + } else { + elseNode_ = value; + } + } else { + elseNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode elseNode = 3; + */ + public Builder clearElseNode() { + bitField0_ = (bitField0_ & ~0x00000004); + elseNode_ = null; + if (elseNodeBuilder_ != null) { + elseNodeBuilder_.dispose(); + elseNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode elseNode = 3; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder getElseNodeBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getElseNodeFieldBuilder().getBuilder(); + } + /** + * optional 
.gandiva.types.TreeNode elseNode = 3; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getElseNodeOrBuilder() { + if (elseNodeBuilder_ != null) { + return elseNodeBuilder_.getMessageOrBuilder(); + } else { + return elseNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : elseNode_; + } + } + /** + * optional .gandiva.types.TreeNode elseNode = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> + getElseNodeFieldBuilder() { + if (elseNodeBuilder_ == null) { + elseNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder>( + getElseNode(), + getParentForChildren(), + isClean()); + elseNode_ = null; + } + return elseNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType returnType_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> returnTypeBuilder_; + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + * @return Whether the returnType field is set. + */ + public boolean hasReturnType() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + * @return The returnType. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getReturnType() { + if (returnTypeBuilder_ == null) { + return returnType_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } else { + return returnTypeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + */ + public Builder setReturnType(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (returnTypeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + returnType_ = value; + } else { + returnTypeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + */ + public Builder setReturnType( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder builderForValue) { + if (returnTypeBuilder_ == null) { + returnType_ = builderForValue.build(); + } else { + returnTypeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + */ + public Builder mergeReturnType(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (returnTypeBuilder_ == null) { + if (((bitField0_ & 0x00000008) != 0) && + returnType_ != null && + returnType_ != org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance()) { + getReturnTypeBuilder().mergeFrom(value); + } else { + returnType_ = value; + } + } else { + returnTypeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + */ + public Builder clearReturnType() { + bitField0_ = (bitField0_ & ~0x00000008); + returnType_ = null; + if (returnTypeBuilder_ != null) { + returnTypeBuilder_.dispose(); + returnTypeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder 
getReturnTypeBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getReturnTypeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getReturnTypeOrBuilder() { + if (returnTypeBuilder_ != null) { + return returnTypeBuilder_.getMessageOrBuilder(); + } else { + return returnType_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> + getReturnTypeFieldBuilder() { + if (returnTypeBuilder_ == null) { + returnTypeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder>( + getReturnType(), + getParentForChildren(), + isClean()); + returnType_ = null; + } + return returnTypeBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.IfNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.IfNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode(); + } + + public static 
org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public IfNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface AndNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.AndNode) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + java.util.List + getArgsList(); + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getArgs(int index); + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + int getArgsCount(); + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + java.util.List + getArgsOrBuilderList(); + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getArgsOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.AndNode} + */ + public static final class AndNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.AndNode) + AndNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use AndNode.newBuilder() to construct. + private AndNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private AndNode() { + args_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new AndNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_AndNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_AndNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.Builder.class); + } + + public static final int ARGS_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List args_; + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + @java.lang.Override + public java.util.List getArgsList() { + return args_; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + @java.lang.Override + public java.util.List + getArgsOrBuilderList() { + return args_; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + @java.lang.Override + public int getArgsCount() { + return args_.size(); + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + @java.lang.Override + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getArgs(int index) { + return args_.get(index); + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getArgsOrBuilder( + int index) { + return args_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < args_.size(); i++) { + output.writeMessage(1, args_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < args_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, args_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode) obj; + + if (!getArgsList() + .equals(other.getArgsList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getArgsCount() > 0) { + hash = (37 * hash) + ARGS_FIELD_NUMBER; + hash = (53 * hash) + 
getArgsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.AndNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.AndNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_AndNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_AndNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (argsBuilder_ == null) { + args_ = java.util.Collections.emptyList(); + } else { + args_ = null; + argsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_AndNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode getDefaultInstanceForType() { + return 
org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode result) { + if (argsBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + args_ = java.util.Collections.unmodifiableList(args_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.args_ = args_; + } else { + result.args_ = argsBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.getDefaultInstance()) return this; + if (argsBuilder_ == null) { + if (!other.args_.isEmpty()) { + if (args_.isEmpty()) { + args_ = other.args_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureArgsIsMutable(); + args_.addAll(other.args_); + } + onChanged(); + } + } else { + if (!other.args_.isEmpty()) { + if (argsBuilder_.isEmpty()) { + 
argsBuilder_.dispose(); + argsBuilder_ = null; + args_ = other.args_; + bitField0_ = (bitField0_ & ~0x00000001); + argsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getArgsFieldBuilder() : null; + } else { + argsBuilder_.addAllMessages(other.args_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.parser(), + extensionRegistry); + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + args_.add(m); + } else { + argsBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.util.List args_ = + java.util.Collections.emptyList(); + private void ensureArgsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + args_ = new java.util.ArrayList(args_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> argsBuilder_; + + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public java.util.List getArgsList() { + if (argsBuilder_ == null) { + return java.util.Collections.unmodifiableList(args_); + } else { + return argsBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public int getArgsCount() { + if (argsBuilder_ == null) { + return args_.size(); + } else { + return argsBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getArgs(int index) { + if (argsBuilder_ == null) { + return args_.get(index); + } else { + return argsBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder setArgs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (argsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureArgsIsMutable(); + args_.set(index, value); + onChanged(); + } else { + argsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder setArgs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + args_.set(index, builderForValue.build()); + onChanged(); + } else { + argsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder addArgs(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (argsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureArgsIsMutable(); + args_.add(value); + onChanged(); + } else { + argsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder addArgs( + int 
index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (argsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureArgsIsMutable(); + args_.add(index, value); + onChanged(); + } else { + argsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder addArgs( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + args_.add(builderForValue.build()); + onChanged(); + } else { + argsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder addArgs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + args_.add(index, builderForValue.build()); + onChanged(); + } else { + argsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder addAllArgs( + java.lang.Iterable values) { + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, args_); + onChanged(); + } else { + argsBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder clearArgs() { + if (argsBuilder_ == null) { + args_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + argsBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder removeArgs(int index) { + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + args_.remove(index); + onChanged(); + } else { + argsBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder getArgsBuilder( + int index) { + return getArgsFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getArgsOrBuilder( + int index) { + if (argsBuilder_ == null) { + return args_.get(index); } else { + return argsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public java.util.List + getArgsOrBuilderList() { + if (argsBuilder_ != null) { + return argsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(args_); + } + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder addArgsBuilder() { + return getArgsFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder addArgsBuilder( + int index) { + return getArgsFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public java.util.List + getArgsBuilderList() { + return getArgsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> + getArgsFieldBuilder() { + if (argsBuilder_ == null) { + argsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder>( + args_, + ((bitField0_ & 0x00000001) != 0), + 
getParentForChildren(), + isClean()); + args_ = null; + } + return argsBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.AndNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.AndNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public AndNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + 
public org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface OrNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.OrNode) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + java.util.List + getArgsList(); + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getArgs(int index); + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + int getArgsCount(); + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + java.util.List + getArgsOrBuilderList(); + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getArgsOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.OrNode} + */ + public static final class OrNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.OrNode) + OrNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use OrNode.newBuilder() to construct. 
+ private OrNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private OrNode() { + args_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new OrNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_OrNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_OrNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.Builder.class); + } + + public static final int ARGS_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List args_; + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + @java.lang.Override + public java.util.List getArgsList() { + return args_; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + @java.lang.Override + public java.util.List + getArgsOrBuilderList() { + return args_; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + @java.lang.Override + public int getArgsCount() { + return args_.size(); + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getArgs(int index) { + return args_.get(index); + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getArgsOrBuilder( + int index) { + return args_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = 
memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < args_.size(); i++) { + output.writeMessage(1, args_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < args_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, args_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode) obj; + + if (!getArgsList() + .equals(other.getArgsList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getArgsCount() > 0) { + hash = (37 * hash) + ARGS_FIELD_NUMBER; + hash = (53 * hash) + getArgsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode parseFrom( + java.nio.ByteBuffer data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode 
parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.OrNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.OrNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_OrNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_OrNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (argsBuilder_ == null) { + args_ = java.util.Collections.emptyList(); + } else { + args_ = null; + argsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_OrNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode getDefaultInstanceForType() { + return 
org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode result) { + if (argsBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + args_ = java.util.Collections.unmodifiableList(args_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.args_ = args_; + } else { + result.args_ = argsBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.getDefaultInstance()) return this; + if (argsBuilder_ == null) { + if (!other.args_.isEmpty()) { + if (args_.isEmpty()) { + args_ = other.args_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureArgsIsMutable(); + args_.addAll(other.args_); + } + onChanged(); + } + } else { + if (!other.args_.isEmpty()) { + if (argsBuilder_.isEmpty()) { + 
argsBuilder_.dispose(); + argsBuilder_ = null; + args_ = other.args_; + bitField0_ = (bitField0_ & ~0x00000001); + argsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getArgsFieldBuilder() : null; + } else { + argsBuilder_.addAllMessages(other.args_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.parser(), + extensionRegistry); + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + args_.add(m); + } else { + argsBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.util.List args_ = + java.util.Collections.emptyList(); + private void ensureArgsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + args_ = new java.util.ArrayList(args_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> argsBuilder_; + + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public java.util.List getArgsList() { + if (argsBuilder_ == null) { + return java.util.Collections.unmodifiableList(args_); + } else { + return argsBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public int getArgsCount() { + if (argsBuilder_ == null) { + return args_.size(); + } else { + return argsBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getArgs(int index) { + if (argsBuilder_ == null) { + return args_.get(index); + } else { + return argsBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder setArgs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (argsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureArgsIsMutable(); + args_.set(index, value); + onChanged(); + } else { + argsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder setArgs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + args_.set(index, builderForValue.build()); + onChanged(); + } else { + argsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder addArgs(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (argsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureArgsIsMutable(); + args_.add(value); + onChanged(); + } else { + argsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder addArgs( + int 
index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (argsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureArgsIsMutable(); + args_.add(index, value); + onChanged(); + } else { + argsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder addArgs( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + args_.add(builderForValue.build()); + onChanged(); + } else { + argsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder addArgs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + args_.add(index, builderForValue.build()); + onChanged(); + } else { + argsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder addAllArgs( + java.lang.Iterable values) { + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, args_); + onChanged(); + } else { + argsBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder clearArgs() { + if (argsBuilder_ == null) { + args_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + argsBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public Builder removeArgs(int index) { + if (argsBuilder_ == null) { + ensureArgsIsMutable(); + args_.remove(index); + onChanged(); + } else { + argsBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder getArgsBuilder( + int index) { + return getArgsFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getArgsOrBuilder( + int index) { + if (argsBuilder_ == null) { + return args_.get(index); } else { + return argsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public java.util.List + getArgsOrBuilderList() { + if (argsBuilder_ != null) { + return argsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(args_); + } + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder addArgsBuilder() { + return getArgsFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder addArgsBuilder( + int index) { + return getArgsFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.TreeNode args = 1; + */ + public java.util.List + getArgsBuilderList() { + return getArgsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> + getArgsFieldBuilder() { + if (argsBuilder_ == null) { + argsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder>( + args_, + ((bitField0_ & 0x00000001) != 0), + 
getParentForChildren(), + isClean()); + args_ = null; + } + return argsBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.OrNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.OrNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public OrNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface NullNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.NullNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional .gandiva.types.ExtGandivaType type = 1; + * @return Whether the type field is set. + */ + boolean hasType(); + /** + * optional .gandiva.types.ExtGandivaType type = 1; + * @return The type. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getType(); + /** + * optional .gandiva.types.ExtGandivaType type = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getTypeOrBuilder(); + } + /** + * Protobuf type {@code gandiva.types.NullNode} + */ + public static final class NullNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.NullNode) + NullNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use NullNode.newBuilder() to construct. 
+ private NullNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private NullNode() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new NullNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_NullNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_NullNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.Builder.class); + } + + private int bitField0_; + public static final int TYPE_FIELD_NUMBER = 1; + private org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType type_; + /** + * optional .gandiva.types.ExtGandivaType type = 1; + * @return Whether the type field is set. + */ + @java.lang.Override + public boolean hasType() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.ExtGandivaType type = 1; + * @return The type. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getType() { + return type_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : type_; + } + /** + * optional .gandiva.types.ExtGandivaType type = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getTypeOrBuilder() { + return type_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : type_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(1, getType()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getType()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode) obj; + + if (hasType() != other.hasType()) return false; + if (hasType()) { + if (!getType() + .equals(other.getType())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasType()) { + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + getType().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + 
public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.NullNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.NullNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_NullNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_NullNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getTypeFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + type_ = null; + if (typeBuilder_ != null) { + typeBuilder_.dispose(); + typeBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_NullNode_descriptor; 
+ } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.type_ = typeBuilder_ == null + ? 
type_ + : typeBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.getDefaultInstance()) return this; + if (other.hasType()) { + mergeType(other.getType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getTypeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType type_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> typeBuilder_; + /** + * optional .gandiva.types.ExtGandivaType type = 1; + * @return Whether the type field is set. + */ + public boolean hasType() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.ExtGandivaType type = 1; + * @return The type. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getType() { + if (typeBuilder_ == null) { + return type_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : type_; + } else { + return typeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.ExtGandivaType type = 1; + */ + public Builder setType(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (typeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + type_ = value; + } else { + typeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType type = 1; + */ + public Builder setType( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder builderForValue) { + if (typeBuilder_ == null) { + type_ = builderForValue.build(); + } else { + typeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType type = 1; + */ + public Builder mergeType(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (typeBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + type_ != null && + type_ != org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance()) { + getTypeBuilder().mergeFrom(value); + } else { + type_ = value; + } + } else { + typeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional 
.gandiva.types.ExtGandivaType type = 1; + */ + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000001); + type_ = null; + if (typeBuilder_ != null) { + typeBuilder_.dispose(); + typeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType type = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder getTypeBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getTypeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.ExtGandivaType type = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getTypeOrBuilder() { + if (typeBuilder_ != null) { + return typeBuilder_.getMessageOrBuilder(); + } else { + return type_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : type_; + } + } + /** + * optional .gandiva.types.ExtGandivaType type = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> + getTypeFieldBuilder() { + if (typeBuilder_ == null) { + typeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder>( + getType(), + getParentForChildren(), + isClean()); + type_ = null; + } + return typeBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // 
@@protoc_insertion_point(builder_scope:gandiva.types.NullNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.NullNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public NullNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface IntNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.IntNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional int32 value = 1; + * @return Whether the value field is set. 
+ */ + boolean hasValue(); + /** + * optional int32 value = 1; + * @return The value. + */ + int getValue(); + } + /** + * Protobuf type {@code gandiva.types.IntNode} + */ + public static final class IntNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.IntNode) + IntNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use IntNode.newBuilder() to construct. + private IntNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private IntNode() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new IntNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IntNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IntNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder.class); + } + + private int bitField0_; + public static final int VALUE_FIELD_NUMBER = 1; + private int value_ = 0; + /** + * optional int32 value = 1; + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional int32 value = 1; + * @return The value. 
+ */ + @java.lang.Override + public int getValue() { + return value_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeInt32(1, value_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode) obj; + + if (hasValue() != other.hasValue()) return false; + if (hasValue()) { + if (getValue() + != other.getValue()) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode parseFrom( + 
java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode 
parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.IntNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.IntNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IntNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IntNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + value_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IntNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode build() { + 
org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.value_ = value_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.getDefaultInstance()) return this; + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + value_ = input.readInt32(); + bitField0_ |= 0x00000001; + break; + 
} // case 8 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int value_ ; + /** + * optional int32 value = 1; + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional int32 value = 1; + * @return The value. + */ + @java.lang.Override + public int getValue() { + return value_; + } + /** + * optional int32 value = 1; + * @param value The value to set. + * @return This builder for chaining. + */ + public Builder setValue(int value) { + + value_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional int32 value = 1; + * @return This builder for chaining. 
+ */ + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000001); + value_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.IntNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.IntNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public IntNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + 
return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface FloatNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.FloatNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional float value = 1; + * @return Whether the value field is set. + */ + boolean hasValue(); + /** + * optional float value = 1; + * @return The value. + */ + float getValue(); + } + /** + * Protobuf type {@code gandiva.types.FloatNode} + */ + public static final class FloatNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.FloatNode) + FloatNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use FloatNode.newBuilder() to construct. + private FloatNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private FloatNode() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new FloatNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FloatNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FloatNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder.class); + } + + private int bitField0_; + public static final int VALUE_FIELD_NUMBER = 1; + private float value_ = 0F; + /** + * optional float value = 1; + * @return Whether the value field is set. 
+ */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional float value = 1; + * @return The value. + */ + @java.lang.Override + public float getValue() { + return value_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeFloat(1, value_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeFloatSize(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode) obj; + + if (hasValue() != other.hasValue()) return false; + if (hasValue()) { + if (java.lang.Float.floatToIntBits(getValue()) + != java.lang.Float.floatToIntBits( + other.getValue())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; 
+ hash = (53 * hash) + java.lang.Float.floatToIntBits( + getValue()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.FloatNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.FloatNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FloatNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FloatNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + value_ = 0F; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FloatNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode build() { + 
org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.value_ = value_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.getDefaultInstance()) return this; + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 13: { + value_ = input.readFloat(); + bitField0_ |= 
0x00000001; + break; + } // case 13 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private float value_ ; + /** + * optional float value = 1; + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional float value = 1; + * @return The value. + */ + @java.lang.Override + public float getValue() { + return value_; + } + /** + * optional float value = 1; + * @param value The value to set. + * @return This builder for chaining. + */ + public Builder setValue(float value) { + + value_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional float value = 1; + * @return This builder for chaining. 
+ */ + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000001); + value_ = 0F; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.FloatNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.FloatNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public FloatNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser 
getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface DoubleNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.DoubleNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional double value = 1; + * @return Whether the value field is set. + */ + boolean hasValue(); + /** + * optional double value = 1; + * @return The value. + */ + double getValue(); + } + /** + * Protobuf type {@code gandiva.types.DoubleNode} + */ + public static final class DoubleNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.DoubleNode) + DoubleNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use DoubleNode.newBuilder() to construct. + private DoubleNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DoubleNode() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new DoubleNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DoubleNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DoubleNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder.class); + } + + private int bitField0_; + public static final int VALUE_FIELD_NUMBER = 1; + private double value_ = 0D; + /** + * optional double value = 1; + * @return Whether the value field 
is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional double value = 1; + * @return The value. + */ + @java.lang.Override + public double getValue() { + return value_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeDouble(1, value_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode) obj; + + if (hasValue() != other.hasValue()) return false; + if (hasValue()) { + if (java.lang.Double.doubleToLongBits(getValue()) + != java.lang.Double.doubleToLongBits( + other.getValue())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasValue()) { + hash = (37 * hash) + 
VALUE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getValue())); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.DoubleNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.DoubleNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DoubleNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DoubleNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + value_ = 0D; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DoubleNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode 
build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.value_ = value_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.getDefaultInstance()) return this; + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 9: { + value_ = 
input.readDouble(); + bitField0_ |= 0x00000001; + break; + } // case 9 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private double value_ ; + /** + * optional double value = 1; + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional double value = 1; + * @return The value. + */ + @java.lang.Override + public double getValue() { + return value_; + } + /** + * optional double value = 1; + * @param value The value to set. + * @return This builder for chaining. + */ + public Builder setValue(double value) { + + value_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional double value = 1; + * @return This builder for chaining. 
+ */ + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000001); + value_ = 0D; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.DoubleNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.DoubleNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DoubleNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser 
getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface BooleanNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.BooleanNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional bool value = 1; + * @return Whether the value field is set. + */ + boolean hasValue(); + /** + * optional bool value = 1; + * @return The value. + */ + boolean getValue(); + } + /** + * Protobuf type {@code gandiva.types.BooleanNode} + */ + public static final class BooleanNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.BooleanNode) + BooleanNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use BooleanNode.newBuilder() to construct. + private BooleanNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private BooleanNode() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new BooleanNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BooleanNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BooleanNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.Builder.class); + } + + private int bitField0_; + public static final int VALUE_FIELD_NUMBER = 1; + private boolean value_ = false; + /** + * optional bool value = 1; + * @return Whether the 
value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional bool value = 1; + * @return The value. + */ + @java.lang.Override + public boolean getValue() { + return value_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeBool(1, value_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode) obj; + + if (hasValue() != other.hasValue()) return false; + if (hasValue()) { + if (getValue() + != other.getValue()) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + 
com.google.protobuf.Internal.hashBoolean( + getValue()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.BooleanNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.BooleanNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BooleanNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BooleanNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + value_ = false; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BooleanNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.value_ = value_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.getDefaultInstance()) return this; + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + 
done = true; + break; + case 8: { + value_ = input.readBool(); + bitField0_ |= 0x00000001; + break; + } // case 8 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private boolean value_ ; + /** + * optional bool value = 1; + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional bool value = 1; + * @return The value. + */ + @java.lang.Override + public boolean getValue() { + return value_; + } + /** + * optional bool value = 1; + * @param value The value to set. + * @return This builder for chaining. + */ + public Builder setValue(boolean value) { + + value_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional bool value = 1; + * @return This builder for chaining. 
+ */ + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000001); + value_ = false; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.BooleanNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.BooleanNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public BooleanNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser 
getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface LongNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.LongNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional int64 value = 1; + * @return Whether the value field is set. + */ + boolean hasValue(); + /** + * optional int64 value = 1; + * @return The value. + */ + long getValue(); + } + /** + * Protobuf type {@code gandiva.types.LongNode} + */ + public static final class LongNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.LongNode) + LongNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use LongNode.newBuilder() to construct. + private LongNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private LongNode() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new LongNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_LongNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_LongNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder.class); + } + + private int bitField0_; + public static final int VALUE_FIELD_NUMBER = 1; + private long value_ = 0L; + /** + * optional int64 value = 1; + * @return Whether the value field is set. 
+ */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional int64 value = 1; + * @return The value. + */ + @java.lang.Override + public long getValue() { + return value_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeInt64(1, value_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode) obj; + + if (hasValue() != other.hasValue()) return false; + if (hasValue()) { + if (getValue() + != other.getValue()) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + 
getValue()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.LongNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.LongNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_LongNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_LongNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + value_ = 0L; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_LongNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode build() { + 
org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.value_ = value_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.getDefaultInstance()) return this; + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + value_ = input.readInt64(); + bitField0_ |= 0x00000001; + 
break; + } // case 8 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private long value_ ; + /** + * optional int64 value = 1; + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional int64 value = 1; + * @return The value. + */ + @java.lang.Override + public long getValue() { + return value_; + } + /** + * optional int64 value = 1; + * @param value The value to set. + * @return This builder for chaining. + */ + public Builder setValue(long value) { + + value_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional int64 value = 1; + * @return This builder for chaining. 
+ */ + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000001); + value_ = 0L; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.LongNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.LongNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public LongNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { 
+ return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface StringNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.StringNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional bytes value = 1; + * @return Whether the value field is set. + */ + boolean hasValue(); + /** + * optional bytes value = 1; + * @return The value. + */ + com.google.protobuf.ByteString getValue(); + } + /** + * Protobuf type {@code gandiva.types.StringNode} + */ + public static final class StringNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.StringNode) + StringNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use StringNode.newBuilder() to construct. + private StringNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private StringNode() { + value_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new StringNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_StringNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_StringNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder.class); + } + + private int bitField0_; + public static final int VALUE_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString value_ = 
com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes value = 1; + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional bytes value = 1; + * @return The value. + */ + @java.lang.Override + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeBytes(1, value_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode) obj; + + if (hasValue() != other.hasValue()) return false; + if (hasValue()) { + if (!getValue() + .equals(other.getValue())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + 
getDescriptor().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.StringNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.StringNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_StringNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_StringNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + value_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_StringNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.value_ = value_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.getDefaultInstance()) return this; + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = 
true; + break; + case 10: { + value_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes value = 1; + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional bytes value = 1; + * @return The value. + */ + @java.lang.Override + public com.google.protobuf.ByteString getValue() { + return value_; + } + /** + * optional bytes value = 1; + * @param value The value to set. + * @return This builder for chaining. + */ + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + value_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional bytes value = 1; + * @return This builder for chaining. 
+ */ + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000001); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.StringNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.StringNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public StringNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface BinaryNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.BinaryNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional bytes value = 1; + * @return Whether the value field is set. + */ + boolean hasValue(); + /** + * optional bytes value = 1; + * @return The value. + */ + com.google.protobuf.ByteString getValue(); + } + /** + * Protobuf type {@code gandiva.types.BinaryNode} + */ + public static final class BinaryNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.BinaryNode) + BinaryNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use BinaryNode.newBuilder() to construct. + private BinaryNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private BinaryNode() { + value_ = com.google.protobuf.ByteString.EMPTY; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new BinaryNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BinaryNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BinaryNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder.class); + } + + private int bitField0_; + public static final int VALUE_FIELD_NUMBER = 1; + 
private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes value = 1; + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional bytes value = 1; + * @return The value. + */ + @java.lang.Override + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeBytes(1, value_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode) obj; + + if (hasValue() != other.hasValue()) return false; + if (hasValue()) { + if (!getValue() + .equals(other.getValue())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + 
} + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + 
public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.BinaryNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.BinaryNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BinaryNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BinaryNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + value_ = com.google.protobuf.ByteString.EMPTY; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BinaryNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.getDefaultInstance(); + } + + @java.lang.Override + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.value_ = value_; + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.getDefaultInstance()) return this; + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = 
true; + break; + case 10: { + value_ = input.readBytes(); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + /** + * optional bytes value = 1; + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional bytes value = 1; + * @return The value. + */ + @java.lang.Override + public com.google.protobuf.ByteString getValue() { + return value_; + } + /** + * optional bytes value = 1; + * @param value The value to set. + * @return This builder for chaining. + */ + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + value_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional bytes value = 1; + * @return This builder for chaining. 
+ */ + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000001); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.BinaryNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.BinaryNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public BinaryNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public 
com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface DecimalNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.DecimalNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional string value = 1; + * @return Whether the value field is set. + */ + boolean hasValue(); + /** + * optional string value = 1; + * @return The value. + */ + java.lang.String getValue(); + /** + * optional string value = 1; + * @return The bytes for value. + */ + com.google.protobuf.ByteString + getValueBytes(); + + /** + * optional int32 precision = 2; + * @return Whether the precision field is set. + */ + boolean hasPrecision(); + /** + * optional int32 precision = 2; + * @return The precision. + */ + int getPrecision(); + + /** + * optional int32 scale = 3; + * @return Whether the scale field is set. + */ + boolean hasScale(); + /** + * optional int32 scale = 3; + * @return The scale. + */ + int getScale(); + } + /** + * Protobuf type {@code gandiva.types.DecimalNode} + */ + public static final class DecimalNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.DecimalNode) + DecimalNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use DecimalNode.newBuilder() to construct. 
+ private DecimalNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DecimalNode() { + value_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new DecimalNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DecimalNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DecimalNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder.class); + } + + private int bitField0_; + public static final int VALUE_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object value_ = ""; + /** + * optional string value = 1; + * @return Whether the value field is set. + */ + @java.lang.Override + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional string value = 1; + * @return The value. + */ + @java.lang.Override + public java.lang.String getValue() { + java.lang.Object ref = value_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + value_ = s; + return s; + } + } + /** + * optional string value = 1; + * @return The bytes for value. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getValueBytes() { + java.lang.Object ref = value_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + value_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PRECISION_FIELD_NUMBER = 2; + private int precision_ = 0; + /** + * optional int32 precision = 2; + * @return Whether the precision field is set. + */ + @java.lang.Override + public boolean hasPrecision() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional int32 precision = 2; + * @return The precision. + */ + @java.lang.Override + public int getPrecision() { + return precision_; + } + + public static final int SCALE_FIELD_NUMBER = 3; + private int scale_ = 0; + /** + * optional int32 scale = 3; + * @return Whether the scale field is set. + */ + @java.lang.Override + public boolean hasScale() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * optional int32 scale = 3; + * @return The scale. 
+ */ + @java.lang.Override + public int getScale() { + return scale_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, value_); + } + if (((bitField0_ & 0x00000002) != 0)) { + output.writeInt32(2, precision_); + } + if (((bitField0_ & 0x00000004) != 0)) { + output.writeInt32(3, scale_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, value_); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(2, precision_); + } + if (((bitField0_ & 0x00000004) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(3, scale_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode) obj; + + if (hasValue() != other.hasValue()) return false; + if (hasValue()) { + if (!getValue() + .equals(other.getValue())) return false; + } + if (hasPrecision() != other.hasPrecision()) return false; + if (hasPrecision()) { + if 
(getPrecision() + != other.getPrecision()) return false; + } + if (hasScale() != other.hasScale()) return false; + if (hasScale()) { + if (getScale() + != other.getScale()) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + if (hasPrecision()) { + hash = (37 * hash) + PRECISION_FIELD_NUMBER; + hash = (53 * hash) + getPrecision(); + } + if (hasScale()) { + hash = (37 * hash) + SCALE_FIELD_NUMBER; + hash = (53 * hash) + getScale(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseFrom(byte[] 
data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.DecimalNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.DecimalNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DecimalNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DecimalNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; 
+ value_ = ""; + precision_ = 0; + scale_ = 0; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DecimalNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.value_ = value_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.precision_ = precision_; + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.scale_ = scale_; + to_bitField0_ |= 0x00000004; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.getDefaultInstance()) return this; + if (other.hasValue()) { + value_ = other.value_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasPrecision()) { + setPrecision(other.getPrecision()); + } + if (other.hasScale()) { + setScale(other.getScale()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + value_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 16: { + precision_ = input.readInt32(); + bitField0_ |= 0x00000002; + break; + } // case 16 + case 24: { + scale_ = input.readInt32(); + bitField0_ |= 0x00000004; + break; + } // case 24 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object value_ = ""; + /** + * optional string value = 1; + * @return Whether the value field is set. + */ + public boolean hasValue() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional string value = 1; + * @return The value. 
+ */ + public java.lang.String getValue() { + java.lang.Object ref = value_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + value_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string value = 1; + * @return The bytes for value. + */ + public com.google.protobuf.ByteString + getValueBytes() { + java.lang.Object ref = value_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + value_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string value = 1; + * @param value The value to set. + * @return This builder for chaining. + */ + public Builder setValue( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + value_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional string value = 1; + * @return This builder for chaining. + */ + public Builder clearValue() { + value_ = getDefaultInstance().getValue(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * optional string value = 1; + * @param value The bytes for value to set. + * @return This builder for chaining. + */ + public Builder setValueBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + value_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private int precision_ ; + /** + * optional int32 precision = 2; + * @return Whether the precision field is set. + */ + @java.lang.Override + public boolean hasPrecision() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional int32 precision = 2; + * @return The precision. 
+ */ + @java.lang.Override + public int getPrecision() { + return precision_; + } + /** + * optional int32 precision = 2; + * @param value The precision to set. + * @return This builder for chaining. + */ + public Builder setPrecision(int value) { + + precision_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional int32 precision = 2; + * @return This builder for chaining. + */ + public Builder clearPrecision() { + bitField0_ = (bitField0_ & ~0x00000002); + precision_ = 0; + onChanged(); + return this; + } + + private int scale_ ; + /** + * optional int32 scale = 3; + * @return Whether the scale field is set. + */ + @java.lang.Override + public boolean hasScale() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * optional int32 scale = 3; + * @return The scale. + */ + @java.lang.Override + public int getScale() { + return scale_; + } + /** + * optional int32 scale = 3; + * @param value The scale to set. + * @return This builder for chaining. + */ + public Builder setScale(int value) { + + scale_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * optional int32 scale = 3; + * @return This builder for chaining. 
+ */ + public Builder clearScale() { + bitField0_ = (bitField0_ & ~0x00000004); + scale_ = 0; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.DecimalNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.DecimalNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DecimalNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser 
getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface TreeNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.TreeNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + * @return Whether the fieldNode field is set. + */ + boolean hasFieldNode(); + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + * @return The fieldNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode getFieldNode(); + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNodeOrBuilder getFieldNodeOrBuilder(); + + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + * @return Whether the fnNode field is set. + */ + boolean hasFnNode(); + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + * @return The fnNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode getFnNode(); + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNodeOrBuilder getFnNodeOrBuilder(); + + /** + *
+     * control expressions
+     * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + * @return Whether the ifNode field is set. + */ + boolean hasIfNode(); + /** + *
+     * control expressions
+     * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + * @return The ifNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode getIfNode(); + /** + *
+     * control expressions
+     * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNodeOrBuilder getIfNodeOrBuilder(); + + /** + * optional .gandiva.types.AndNode andNode = 7; + * @return Whether the andNode field is set. + */ + boolean hasAndNode(); + /** + * optional .gandiva.types.AndNode andNode = 7; + * @return The andNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode getAndNode(); + /** + * optional .gandiva.types.AndNode andNode = 7; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNodeOrBuilder getAndNodeOrBuilder(); + + /** + * optional .gandiva.types.OrNode orNode = 8; + * @return Whether the orNode field is set. + */ + boolean hasOrNode(); + /** + * optional .gandiva.types.OrNode orNode = 8; + * @return The orNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode getOrNode(); + /** + * optional .gandiva.types.OrNode orNode = 8; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNodeOrBuilder getOrNodeOrBuilder(); + + /** + *
+     * literals
+     * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + * @return Whether the nullNode field is set. + */ + boolean hasNullNode(); + /** + *
+     * literals
+     * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + * @return The nullNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode getNullNode(); + /** + *
+     * literals
+     * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNodeOrBuilder getNullNodeOrBuilder(); + + /** + * optional .gandiva.types.IntNode intNode = 12; + * @return Whether the intNode field is set. + */ + boolean hasIntNode(); + /** + * optional .gandiva.types.IntNode intNode = 12; + * @return The intNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode getIntNode(); + /** + * optional .gandiva.types.IntNode intNode = 12; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder getIntNodeOrBuilder(); + + /** + * optional .gandiva.types.FloatNode floatNode = 13; + * @return Whether the floatNode field is set. + */ + boolean hasFloatNode(); + /** + * optional .gandiva.types.FloatNode floatNode = 13; + * @return The floatNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode getFloatNode(); + /** + * optional .gandiva.types.FloatNode floatNode = 13; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder getFloatNodeOrBuilder(); + + /** + * optional .gandiva.types.LongNode longNode = 14; + * @return Whether the longNode field is set. + */ + boolean hasLongNode(); + /** + * optional .gandiva.types.LongNode longNode = 14; + * @return The longNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode getLongNode(); + /** + * optional .gandiva.types.LongNode longNode = 14; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder getLongNodeOrBuilder(); + + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + * @return Whether the booleanNode field is set. + */ + boolean hasBooleanNode(); + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + * @return The booleanNode. 
+ */ + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode getBooleanNode(); + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNodeOrBuilder getBooleanNodeOrBuilder(); + + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + * @return Whether the doubleNode field is set. + */ + boolean hasDoubleNode(); + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + * @return The doubleNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode getDoubleNode(); + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder getDoubleNodeOrBuilder(); + + /** + * optional .gandiva.types.StringNode stringNode = 17; + * @return Whether the stringNode field is set. + */ + boolean hasStringNode(); + /** + * optional .gandiva.types.StringNode stringNode = 17; + * @return The stringNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode getStringNode(); + /** + * optional .gandiva.types.StringNode stringNode = 17; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder getStringNodeOrBuilder(); + + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + * @return Whether the binaryNode field is set. + */ + boolean hasBinaryNode(); + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + * @return The binaryNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode getBinaryNode(); + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder getBinaryNodeOrBuilder(); + + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + * @return Whether the decimalNode field is set. + */ + boolean hasDecimalNode(); + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + * @return The decimalNode. 
+ */ + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode getDecimalNode(); + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder getDecimalNodeOrBuilder(); + + /** + *
+     * in expr
+     * 
+ * + * optional .gandiva.types.InNode inNode = 21; + * @return Whether the inNode field is set. + */ + boolean hasInNode(); + /** + *
+     * in expr
+     * 
+ * + * optional .gandiva.types.InNode inNode = 21; + * @return The inNode. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.InNode getInNode(); + /** + *
+     * in expr
+     * 
+ * + * optional .gandiva.types.InNode inNode = 21; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.InNodeOrBuilder getInNodeOrBuilder(); + } + /** + * Protobuf type {@code gandiva.types.TreeNode} + */ + public static final class TreeNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.TreeNode) + TreeNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use TreeNode.newBuilder() to construct. + private TreeNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private TreeNode() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new TreeNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_TreeNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_TreeNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder.class); + } + + private int bitField0_; + public static final int FIELDNODE_FIELD_NUMBER = 1; + private org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode fieldNode_; + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + * @return Whether the fieldNode field is set. + */ + @java.lang.Override + public boolean hasFieldNode() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + * @return The fieldNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode getFieldNode() { + return fieldNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.getDefaultInstance() : fieldNode_; + } + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNodeOrBuilder getFieldNodeOrBuilder() { + return fieldNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.getDefaultInstance() : fieldNode_; + } + + public static final int FNNODE_FIELD_NUMBER = 2; + private org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode fnNode_; + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + * @return Whether the fnNode field is set. + */ + @java.lang.Override + public boolean hasFnNode() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + * @return The fnNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode getFnNode() { + return fnNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.getDefaultInstance() : fnNode_; + } + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNodeOrBuilder getFnNodeOrBuilder() { + return fnNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.getDefaultInstance() : fnNode_; + } + + public static final int IFNODE_FIELD_NUMBER = 6; + private org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode ifNode_; + /** + *
+     * control expressions
+     * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + * @return Whether the ifNode field is set. + */ + @java.lang.Override + public boolean hasIfNode() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + *
+     * control expressions
+     * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + * @return The ifNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode getIfNode() { + return ifNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.getDefaultInstance() : ifNode_; + } + /** + *
+     * control expressions
+     * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IfNodeOrBuilder getIfNodeOrBuilder() { + return ifNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.getDefaultInstance() : ifNode_; + } + + public static final int ANDNODE_FIELD_NUMBER = 7; + private org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode andNode_; + /** + * optional .gandiva.types.AndNode andNode = 7; + * @return Whether the andNode field is set. + */ + @java.lang.Override + public boolean hasAndNode() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + * optional .gandiva.types.AndNode andNode = 7; + * @return The andNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode getAndNode() { + return andNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.getDefaultInstance() : andNode_; + } + /** + * optional .gandiva.types.AndNode andNode = 7; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.AndNodeOrBuilder getAndNodeOrBuilder() { + return andNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.getDefaultInstance() : andNode_; + } + + public static final int ORNODE_FIELD_NUMBER = 8; + private org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode orNode_; + /** + * optional .gandiva.types.OrNode orNode = 8; + * @return Whether the orNode field is set. + */ + @java.lang.Override + public boolean hasOrNode() { + return ((bitField0_ & 0x00000010) != 0); + } + /** + * optional .gandiva.types.OrNode orNode = 8; + * @return The orNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode getOrNode() { + return orNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.getDefaultInstance() : orNode_; + } + /** + * optional .gandiva.types.OrNode orNode = 8; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.OrNodeOrBuilder getOrNodeOrBuilder() { + return orNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.getDefaultInstance() : orNode_; + } + + public static final int NULLNODE_FIELD_NUMBER = 11; + private org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode nullNode_; + /** + *
+     * literals
+     * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + * @return Whether the nullNode field is set. + */ + @java.lang.Override + public boolean hasNullNode() { + return ((bitField0_ & 0x00000020) != 0); + } + /** + *
+     * literals
+     * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + * @return The nullNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode getNullNode() { + return nullNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.getDefaultInstance() : nullNode_; + } + /** + *
+     * literals
+     * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.NullNodeOrBuilder getNullNodeOrBuilder() { + return nullNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.getDefaultInstance() : nullNode_; + } + + public static final int INTNODE_FIELD_NUMBER = 12; + private org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode intNode_; + /** + * optional .gandiva.types.IntNode intNode = 12; + * @return Whether the intNode field is set. + */ + @java.lang.Override + public boolean hasIntNode() { + return ((bitField0_ & 0x00000040) != 0); + } + /** + * optional .gandiva.types.IntNode intNode = 12; + * @return The intNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode getIntNode() { + return intNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.getDefaultInstance() : intNode_; + } + /** + * optional .gandiva.types.IntNode intNode = 12; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder getIntNodeOrBuilder() { + return intNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.getDefaultInstance() : intNode_; + } + + public static final int FLOATNODE_FIELD_NUMBER = 13; + private org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode floatNode_; + /** + * optional .gandiva.types.FloatNode floatNode = 13; + * @return Whether the floatNode field is set. + */ + @java.lang.Override + public boolean hasFloatNode() { + return ((bitField0_ & 0x00000080) != 0); + } + /** + * optional .gandiva.types.FloatNode floatNode = 13; + * @return The floatNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode getFloatNode() { + return floatNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.getDefaultInstance() : floatNode_; + } + /** + * optional .gandiva.types.FloatNode floatNode = 13; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder getFloatNodeOrBuilder() { + return floatNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.getDefaultInstance() : floatNode_; + } + + public static final int LONGNODE_FIELD_NUMBER = 14; + private org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode longNode_; + /** + * optional .gandiva.types.LongNode longNode = 14; + * @return Whether the longNode field is set. + */ + @java.lang.Override + public boolean hasLongNode() { + return ((bitField0_ & 0x00000100) != 0); + } + /** + * optional .gandiva.types.LongNode longNode = 14; + * @return The longNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode getLongNode() { + return longNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.getDefaultInstance() : longNode_; + } + /** + * optional .gandiva.types.LongNode longNode = 14; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder getLongNodeOrBuilder() { + return longNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.getDefaultInstance() : longNode_; + } + + public static final int BOOLEANNODE_FIELD_NUMBER = 15; + private org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode booleanNode_; + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + * @return Whether the booleanNode field is set. + */ + @java.lang.Override + public boolean hasBooleanNode() { + return ((bitField0_ & 0x00000200) != 0); + } + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + * @return The booleanNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode getBooleanNode() { + return booleanNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.getDefaultInstance() : booleanNode_; + } + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNodeOrBuilder getBooleanNodeOrBuilder() { + return booleanNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.getDefaultInstance() : booleanNode_; + } + + public static final int DOUBLENODE_FIELD_NUMBER = 16; + private org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode doubleNode_; + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + * @return Whether the doubleNode field is set. + */ + @java.lang.Override + public boolean hasDoubleNode() { + return ((bitField0_ & 0x00000400) != 0); + } + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + * @return The doubleNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode getDoubleNode() { + return doubleNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.getDefaultInstance() : doubleNode_; + } + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder getDoubleNodeOrBuilder() { + return doubleNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.getDefaultInstance() : doubleNode_; + } + + public static final int STRINGNODE_FIELD_NUMBER = 17; + private org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode stringNode_; + /** + * optional .gandiva.types.StringNode stringNode = 17; + * @return Whether the stringNode field is set. + */ + @java.lang.Override + public boolean hasStringNode() { + return ((bitField0_ & 0x00000800) != 0); + } + /** + * optional .gandiva.types.StringNode stringNode = 17; + * @return The stringNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode getStringNode() { + return stringNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.getDefaultInstance() : stringNode_; + } + /** + * optional .gandiva.types.StringNode stringNode = 17; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder getStringNodeOrBuilder() { + return stringNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.getDefaultInstance() : stringNode_; + } + + public static final int BINARYNODE_FIELD_NUMBER = 18; + private org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode binaryNode_; + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + * @return Whether the binaryNode field is set. + */ + @java.lang.Override + public boolean hasBinaryNode() { + return ((bitField0_ & 0x00001000) != 0); + } + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + * @return The binaryNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode getBinaryNode() { + return binaryNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.getDefaultInstance() : binaryNode_; + } + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder getBinaryNodeOrBuilder() { + return binaryNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.getDefaultInstance() : binaryNode_; + } + + public static final int DECIMALNODE_FIELD_NUMBER = 19; + private org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode decimalNode_; + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + * @return Whether the decimalNode field is set. + */ + @java.lang.Override + public boolean hasDecimalNode() { + return ((bitField0_ & 0x00002000) != 0); + } + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + * @return The decimalNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode getDecimalNode() { + return decimalNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.getDefaultInstance() : decimalNode_; + } + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder getDecimalNodeOrBuilder() { + return decimalNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.getDefaultInstance() : decimalNode_; + } + + public static final int INNODE_FIELD_NUMBER = 21; + private org.apache.arrow.gandiva.ipc.GandivaTypes.InNode inNode_; + /** + *
+     * in expr
+     * 
+ * + * optional .gandiva.types.InNode inNode = 21; + * @return Whether the inNode field is set. + */ + @java.lang.Override + public boolean hasInNode() { + return ((bitField0_ & 0x00004000) != 0); + } + /** + *
+     * in expr
+     * 
+ * + * optional .gandiva.types.InNode inNode = 21; + * @return The inNode. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.InNode getInNode() { + return inNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.getDefaultInstance() : inNode_; + } + /** + *
+     * in expr
+     * 
+ * + * optional .gandiva.types.InNode inNode = 21; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.InNodeOrBuilder getInNodeOrBuilder() { + return inNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.getDefaultInstance() : inNode_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(1, getFieldNode()); + } + if (((bitField0_ & 0x00000002) != 0)) { + output.writeMessage(2, getFnNode()); + } + if (((bitField0_ & 0x00000004) != 0)) { + output.writeMessage(6, getIfNode()); + } + if (((bitField0_ & 0x00000008) != 0)) { + output.writeMessage(7, getAndNode()); + } + if (((bitField0_ & 0x00000010) != 0)) { + output.writeMessage(8, getOrNode()); + } + if (((bitField0_ & 0x00000020) != 0)) { + output.writeMessage(11, getNullNode()); + } + if (((bitField0_ & 0x00000040) != 0)) { + output.writeMessage(12, getIntNode()); + } + if (((bitField0_ & 0x00000080) != 0)) { + output.writeMessage(13, getFloatNode()); + } + if (((bitField0_ & 0x00000100) != 0)) { + output.writeMessage(14, getLongNode()); + } + if (((bitField0_ & 0x00000200) != 0)) { + output.writeMessage(15, getBooleanNode()); + } + if (((bitField0_ & 0x00000400) != 0)) { + output.writeMessage(16, getDoubleNode()); + } + if (((bitField0_ & 0x00000800) != 0)) { + output.writeMessage(17, getStringNode()); + } + if (((bitField0_ & 0x00001000) != 0)) { + output.writeMessage(18, getBinaryNode()); + } + if (((bitField0_ & 0x00002000) != 0)) { + output.writeMessage(19, getDecimalNode()); + } + if (((bitField0_ & 0x00004000) != 0)) { + output.writeMessage(21, 
getInNode()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getFieldNode()); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getFnNode()); + } + if (((bitField0_ & 0x00000004) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, getIfNode()); + } + if (((bitField0_ & 0x00000008) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, getAndNode()); + } + if (((bitField0_ & 0x00000010) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(8, getOrNode()); + } + if (((bitField0_ & 0x00000020) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(11, getNullNode()); + } + if (((bitField0_ & 0x00000040) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(12, getIntNode()); + } + if (((bitField0_ & 0x00000080) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(13, getFloatNode()); + } + if (((bitField0_ & 0x00000100) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(14, getLongNode()); + } + if (((bitField0_ & 0x00000200) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(15, getBooleanNode()); + } + if (((bitField0_ & 0x00000400) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(16, getDoubleNode()); + } + if (((bitField0_ & 0x00000800) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(17, getStringNode()); + } + if (((bitField0_ & 0x00001000) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(18, getBinaryNode()); + } + if (((bitField0_ & 0x00002000) != 0)) { 
+ size += com.google.protobuf.CodedOutputStream + .computeMessageSize(19, getDecimalNode()); + } + if (((bitField0_ & 0x00004000) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(21, getInNode()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode) obj; + + if (hasFieldNode() != other.hasFieldNode()) return false; + if (hasFieldNode()) { + if (!getFieldNode() + .equals(other.getFieldNode())) return false; + } + if (hasFnNode() != other.hasFnNode()) return false; + if (hasFnNode()) { + if (!getFnNode() + .equals(other.getFnNode())) return false; + } + if (hasIfNode() != other.hasIfNode()) return false; + if (hasIfNode()) { + if (!getIfNode() + .equals(other.getIfNode())) return false; + } + if (hasAndNode() != other.hasAndNode()) return false; + if (hasAndNode()) { + if (!getAndNode() + .equals(other.getAndNode())) return false; + } + if (hasOrNode() != other.hasOrNode()) return false; + if (hasOrNode()) { + if (!getOrNode() + .equals(other.getOrNode())) return false; + } + if (hasNullNode() != other.hasNullNode()) return false; + if (hasNullNode()) { + if (!getNullNode() + .equals(other.getNullNode())) return false; + } + if (hasIntNode() != other.hasIntNode()) return false; + if (hasIntNode()) { + if (!getIntNode() + .equals(other.getIntNode())) return false; + } + if (hasFloatNode() != other.hasFloatNode()) return false; + if (hasFloatNode()) { + if (!getFloatNode() + .equals(other.getFloatNode())) return false; + } + if (hasLongNode() != other.hasLongNode()) return false; + if (hasLongNode()) { + if (!getLongNode() + .equals(other.getLongNode())) return 
false; + } + if (hasBooleanNode() != other.hasBooleanNode()) return false; + if (hasBooleanNode()) { + if (!getBooleanNode() + .equals(other.getBooleanNode())) return false; + } + if (hasDoubleNode() != other.hasDoubleNode()) return false; + if (hasDoubleNode()) { + if (!getDoubleNode() + .equals(other.getDoubleNode())) return false; + } + if (hasStringNode() != other.hasStringNode()) return false; + if (hasStringNode()) { + if (!getStringNode() + .equals(other.getStringNode())) return false; + } + if (hasBinaryNode() != other.hasBinaryNode()) return false; + if (hasBinaryNode()) { + if (!getBinaryNode() + .equals(other.getBinaryNode())) return false; + } + if (hasDecimalNode() != other.hasDecimalNode()) return false; + if (hasDecimalNode()) { + if (!getDecimalNode() + .equals(other.getDecimalNode())) return false; + } + if (hasInNode() != other.hasInNode()) return false; + if (hasInNode()) { + if (!getInNode() + .equals(other.getInNode())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasFieldNode()) { + hash = (37 * hash) + FIELDNODE_FIELD_NUMBER; + hash = (53 * hash) + getFieldNode().hashCode(); + } + if (hasFnNode()) { + hash = (37 * hash) + FNNODE_FIELD_NUMBER; + hash = (53 * hash) + getFnNode().hashCode(); + } + if (hasIfNode()) { + hash = (37 * hash) + IFNODE_FIELD_NUMBER; + hash = (53 * hash) + getIfNode().hashCode(); + } + if (hasAndNode()) { + hash = (37 * hash) + ANDNODE_FIELD_NUMBER; + hash = (53 * hash) + getAndNode().hashCode(); + } + if (hasOrNode()) { + hash = (37 * hash) + ORNODE_FIELD_NUMBER; + hash = (53 * hash) + getOrNode().hashCode(); + } + if (hasNullNode()) { + hash = (37 * hash) + NULLNODE_FIELD_NUMBER; + hash = (53 * hash) + getNullNode().hashCode(); + } + if (hasIntNode()) { + hash = (37 * 
hash) + INTNODE_FIELD_NUMBER; + hash = (53 * hash) + getIntNode().hashCode(); + } + if (hasFloatNode()) { + hash = (37 * hash) + FLOATNODE_FIELD_NUMBER; + hash = (53 * hash) + getFloatNode().hashCode(); + } + if (hasLongNode()) { + hash = (37 * hash) + LONGNODE_FIELD_NUMBER; + hash = (53 * hash) + getLongNode().hashCode(); + } + if (hasBooleanNode()) { + hash = (37 * hash) + BOOLEANNODE_FIELD_NUMBER; + hash = (53 * hash) + getBooleanNode().hashCode(); + } + if (hasDoubleNode()) { + hash = (37 * hash) + DOUBLENODE_FIELD_NUMBER; + hash = (53 * hash) + getDoubleNode().hashCode(); + } + if (hasStringNode()) { + hash = (37 * hash) + STRINGNODE_FIELD_NUMBER; + hash = (53 * hash) + getStringNode().hashCode(); + } + if (hasBinaryNode()) { + hash = (37 * hash) + BINARYNODE_FIELD_NUMBER; + hash = (53 * hash) + getBinaryNode().hashCode(); + } + if (hasDecimalNode()) { + hash = (37 * hash) + DECIMALNODE_FIELD_NUMBER; + hash = (53 * hash) + getDecimalNode().hashCode(); + } + if (hasInNode()) { + hash = (37 * hash) + INNODE_FIELD_NUMBER; + hash = (53 * hash) + getInNode().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseFrom( + 
com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.TreeNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.TreeNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_TreeNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_TreeNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder.class); + } + + // Construct using 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getFieldNodeFieldBuilder(); + getFnNodeFieldBuilder(); + getIfNodeFieldBuilder(); + getAndNodeFieldBuilder(); + getOrNodeFieldBuilder(); + getNullNodeFieldBuilder(); + getIntNodeFieldBuilder(); + getFloatNodeFieldBuilder(); + getLongNodeFieldBuilder(); + getBooleanNodeFieldBuilder(); + getDoubleNodeFieldBuilder(); + getStringNodeFieldBuilder(); + getBinaryNodeFieldBuilder(); + getDecimalNodeFieldBuilder(); + getInNodeFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + fieldNode_ = null; + if (fieldNodeBuilder_ != null) { + fieldNodeBuilder_.dispose(); + fieldNodeBuilder_ = null; + } + fnNode_ = null; + if (fnNodeBuilder_ != null) { + fnNodeBuilder_.dispose(); + fnNodeBuilder_ = null; + } + ifNode_ = null; + if (ifNodeBuilder_ != null) { + ifNodeBuilder_.dispose(); + ifNodeBuilder_ = null; + } + andNode_ = null; + if (andNodeBuilder_ != null) { + andNodeBuilder_.dispose(); + andNodeBuilder_ = null; + } + orNode_ = null; + if (orNodeBuilder_ != null) { + orNodeBuilder_.dispose(); + orNodeBuilder_ = null; + } + nullNode_ = null; + if (nullNodeBuilder_ != null) { + nullNodeBuilder_.dispose(); + nullNodeBuilder_ = null; + } + intNode_ = null; + if (intNodeBuilder_ != null) { + intNodeBuilder_.dispose(); + intNodeBuilder_ = null; + } + floatNode_ = null; + if (floatNodeBuilder_ != null) { + floatNodeBuilder_.dispose(); + floatNodeBuilder_ = null; + } + longNode_ = null; + if (longNodeBuilder_ != null) { + longNodeBuilder_.dispose(); + longNodeBuilder_ = null; + } + booleanNode_ = null; + if (booleanNodeBuilder_ != null) { + 
booleanNodeBuilder_.dispose(); + booleanNodeBuilder_ = null; + } + doubleNode_ = null; + if (doubleNodeBuilder_ != null) { + doubleNodeBuilder_.dispose(); + doubleNodeBuilder_ = null; + } + stringNode_ = null; + if (stringNodeBuilder_ != null) { + stringNodeBuilder_.dispose(); + stringNodeBuilder_ = null; + } + binaryNode_ = null; + if (binaryNodeBuilder_ != null) { + binaryNodeBuilder_.dispose(); + binaryNodeBuilder_ = null; + } + decimalNode_ = null; + if (decimalNodeBuilder_ != null) { + decimalNodeBuilder_.dispose(); + decimalNodeBuilder_ = null; + } + inNode_ = null; + if (inNodeBuilder_ != null) { + inNodeBuilder_.dispose(); + inNodeBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_TreeNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.fieldNode_ = fieldNodeBuilder_ == null + ? 
fieldNode_ + : fieldNodeBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.fnNode_ = fnNodeBuilder_ == null + ? fnNode_ + : fnNodeBuilder_.build(); + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.ifNode_ = ifNodeBuilder_ == null + ? ifNode_ + : ifNodeBuilder_.build(); + to_bitField0_ |= 0x00000004; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.andNode_ = andNodeBuilder_ == null + ? andNode_ + : andNodeBuilder_.build(); + to_bitField0_ |= 0x00000008; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.orNode_ = orNodeBuilder_ == null + ? orNode_ + : orNodeBuilder_.build(); + to_bitField0_ |= 0x00000010; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.nullNode_ = nullNodeBuilder_ == null + ? nullNode_ + : nullNodeBuilder_.build(); + to_bitField0_ |= 0x00000020; + } + if (((from_bitField0_ & 0x00000040) != 0)) { + result.intNode_ = intNodeBuilder_ == null + ? intNode_ + : intNodeBuilder_.build(); + to_bitField0_ |= 0x00000040; + } + if (((from_bitField0_ & 0x00000080) != 0)) { + result.floatNode_ = floatNodeBuilder_ == null + ? floatNode_ + : floatNodeBuilder_.build(); + to_bitField0_ |= 0x00000080; + } + if (((from_bitField0_ & 0x00000100) != 0)) { + result.longNode_ = longNodeBuilder_ == null + ? longNode_ + : longNodeBuilder_.build(); + to_bitField0_ |= 0x00000100; + } + if (((from_bitField0_ & 0x00000200) != 0)) { + result.booleanNode_ = booleanNodeBuilder_ == null + ? booleanNode_ + : booleanNodeBuilder_.build(); + to_bitField0_ |= 0x00000200; + } + if (((from_bitField0_ & 0x00000400) != 0)) { + result.doubleNode_ = doubleNodeBuilder_ == null + ? doubleNode_ + : doubleNodeBuilder_.build(); + to_bitField0_ |= 0x00000400; + } + if (((from_bitField0_ & 0x00000800) != 0)) { + result.stringNode_ = stringNodeBuilder_ == null + ? 
stringNode_ + : stringNodeBuilder_.build(); + to_bitField0_ |= 0x00000800; + } + if (((from_bitField0_ & 0x00001000) != 0)) { + result.binaryNode_ = binaryNodeBuilder_ == null + ? binaryNode_ + : binaryNodeBuilder_.build(); + to_bitField0_ |= 0x00001000; + } + if (((from_bitField0_ & 0x00002000) != 0)) { + result.decimalNode_ = decimalNodeBuilder_ == null + ? decimalNode_ + : decimalNodeBuilder_.build(); + to_bitField0_ |= 0x00002000; + } + if (((from_bitField0_ & 0x00004000) != 0)) { + result.inNode_ = inNodeBuilder_ == null + ? inNode_ + : inNodeBuilder_.build(); + to_bitField0_ |= 0x00004000; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()) return this; + if (other.hasFieldNode()) { + mergeFieldNode(other.getFieldNode()); + } + if (other.hasFnNode()) { + mergeFnNode(other.getFnNode()); + } + if (other.hasIfNode()) { + mergeIfNode(other.getIfNode()); + } + if (other.hasAndNode()) { + mergeAndNode(other.getAndNode()); + } + if (other.hasOrNode()) { + mergeOrNode(other.getOrNode()); + } + if (other.hasNullNode()) { + mergeNullNode(other.getNullNode()); + } + if (other.hasIntNode()) { + mergeIntNode(other.getIntNode()); + } + if (other.hasFloatNode()) { + mergeFloatNode(other.getFloatNode()); + } + if (other.hasLongNode()) { + mergeLongNode(other.getLongNode()); + } + if (other.hasBooleanNode()) { + mergeBooleanNode(other.getBooleanNode()); + } + if (other.hasDoubleNode()) { + mergeDoubleNode(other.getDoubleNode()); + } + if (other.hasStringNode()) { + mergeStringNode(other.getStringNode()); + } + 
if (other.hasBinaryNode()) { + mergeBinaryNode(other.getBinaryNode()); + } + if (other.hasDecimalNode()) { + mergeDecimalNode(other.getDecimalNode()); + } + if (other.hasInNode()) { + mergeInNode(other.getInNode()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getFieldNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + input.readMessage( + getFnNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 50: { + input.readMessage( + getIfNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000004; + break; + } // case 50 + case 58: { + input.readMessage( + getAndNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000008; + break; + } // case 58 + case 66: { + input.readMessage( + getOrNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000010; + break; + } // case 66 + case 90: { + input.readMessage( + getNullNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000020; + break; + } // case 90 + case 98: { + input.readMessage( + getIntNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000040; + break; + } // case 98 + case 106: { + input.readMessage( + getFloatNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000080; + break; + } // case 106 + 
case 114: { + input.readMessage( + getLongNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000100; + break; + } // case 114 + case 122: { + input.readMessage( + getBooleanNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000200; + break; + } // case 122 + case 130: { + input.readMessage( + getDoubleNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000400; + break; + } // case 130 + case 138: { + input.readMessage( + getStringNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000800; + break; + } // case 138 + case 146: { + input.readMessage( + getBinaryNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00001000; + break; + } // case 146 + case 154: { + input.readMessage( + getDecimalNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00002000; + break; + } // case 154 + case 170: { + input.readMessage( + getInNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00004000; + break; + } // case 170 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode fieldNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNodeOrBuilder> fieldNodeBuilder_; + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + * @return Whether the fieldNode field is set. 
+ */ + public boolean hasFieldNode() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + * @return The fieldNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode getFieldNode() { + if (fieldNodeBuilder_ == null) { + return fieldNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.getDefaultInstance() : fieldNode_; + } else { + return fieldNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + */ + public Builder setFieldNode(org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode value) { + if (fieldNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + fieldNode_ = value; + } else { + fieldNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + */ + public Builder setFieldNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.Builder builderForValue) { + if (fieldNodeBuilder_ == null) { + fieldNode_ = builderForValue.build(); + } else { + fieldNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + */ + public Builder mergeFieldNode(org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode value) { + if (fieldNodeBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + fieldNode_ != null && + fieldNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.getDefaultInstance()) { + getFieldNodeBuilder().mergeFrom(value); + } else { + fieldNode_ = value; + } + } else { + fieldNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + */ + public Builder clearFieldNode() { + bitField0_ = (bitField0_ & ~0x00000001); + fieldNode_ = null; + if (fieldNodeBuilder_ != null) { + 
fieldNodeBuilder_.dispose(); + fieldNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.Builder getFieldNodeBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getFieldNodeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNodeOrBuilder getFieldNodeOrBuilder() { + if (fieldNodeBuilder_ != null) { + return fieldNodeBuilder_.getMessageOrBuilder(); + } else { + return fieldNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.getDefaultInstance() : fieldNode_; + } + } + /** + * optional .gandiva.types.FieldNode fieldNode = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNodeOrBuilder> + getFieldNodeFieldBuilder() { + if (fieldNodeBuilder_ == null) { + fieldNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldNodeOrBuilder>( + getFieldNode(), + getParentForChildren(), + isClean()); + fieldNode_ = null; + } + return fieldNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode fnNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNodeOrBuilder> fnNodeBuilder_; + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + * @return Whether the fnNode field is set. 
+ */ + public boolean hasFnNode() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + * @return The fnNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode getFnNode() { + if (fnNodeBuilder_ == null) { + return fnNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.getDefaultInstance() : fnNode_; + } else { + return fnNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + */ + public Builder setFnNode(org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode value) { + if (fnNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + fnNode_ = value; + } else { + fnNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + */ + public Builder setFnNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.Builder builderForValue) { + if (fnNodeBuilder_ == null) { + fnNode_ = builderForValue.build(); + } else { + fnNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + */ + public Builder mergeFnNode(org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode value) { + if (fnNodeBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) && + fnNode_ != null && + fnNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.getDefaultInstance()) { + getFnNodeBuilder().mergeFrom(value); + } else { + fnNode_ = value; + } + } else { + fnNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + */ + public Builder clearFnNode() { + bitField0_ = (bitField0_ & ~0x00000002); + fnNode_ = null; + if (fnNodeBuilder_ != null) { + fnNodeBuilder_.dispose(); + fnNodeBuilder_ = null; + } + 
onChanged(); + return this; + } + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.Builder getFnNodeBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getFnNodeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNodeOrBuilder getFnNodeOrBuilder() { + if (fnNodeBuilder_ != null) { + return fnNodeBuilder_.getMessageOrBuilder(); + } else { + return fnNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.getDefaultInstance() : fnNode_; + } + } + /** + * optional .gandiva.types.FunctionNode fnNode = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNodeOrBuilder> + getFnNodeFieldBuilder() { + if (fnNodeBuilder_ == null) { + fnNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionNodeOrBuilder>( + getFnNode(), + getParentForChildren(), + isClean()); + fnNode_ = null; + } + return fnNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode ifNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode, org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IfNodeOrBuilder> ifNodeBuilder_; + /** + *
+       * control expressions
+       * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + * @return Whether the ifNode field is set. + */ + public boolean hasIfNode() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + *
+       * control expressions
+       * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + * @return The ifNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode getIfNode() { + if (ifNodeBuilder_ == null) { + return ifNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.getDefaultInstance() : ifNode_; + } else { + return ifNodeBuilder_.getMessage(); + } + } + /** + *
+       * control expressions
+       * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + */ + public Builder setIfNode(org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode value) { + if (ifNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ifNode_ = value; + } else { + ifNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * control expressions
+       * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + */ + public Builder setIfNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.Builder builderForValue) { + if (ifNodeBuilder_ == null) { + ifNode_ = builderForValue.build(); + } else { + ifNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * control expressions
+       * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + */ + public Builder mergeIfNode(org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode value) { + if (ifNodeBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0) && + ifNode_ != null && + ifNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.getDefaultInstance()) { + getIfNodeBuilder().mergeFrom(value); + } else { + ifNode_ = value; + } + } else { + ifNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + *
+       * control expressions
+       * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + */ + public Builder clearIfNode() { + bitField0_ = (bitField0_ & ~0x00000004); + ifNode_ = null; + if (ifNodeBuilder_ != null) { + ifNodeBuilder_.dispose(); + ifNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       * control expressions
+       * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.Builder getIfNodeBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getIfNodeFieldBuilder().getBuilder(); + } + /** + *
+       * control expressions
+       * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IfNodeOrBuilder getIfNodeOrBuilder() { + if (ifNodeBuilder_ != null) { + return ifNodeBuilder_.getMessageOrBuilder(); + } else { + return ifNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.getDefaultInstance() : ifNode_; + } + } + /** + *
+       * control expressions
+       * 
+ * + * optional .gandiva.types.IfNode ifNode = 6; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode, org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IfNodeOrBuilder> + getIfNodeFieldBuilder() { + if (ifNodeBuilder_ == null) { + ifNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode, org.apache.arrow.gandiva.ipc.GandivaTypes.IfNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IfNodeOrBuilder>( + getIfNode(), + getParentForChildren(), + isClean()); + ifNode_ = null; + } + return ifNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode andNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode, org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.AndNodeOrBuilder> andNodeBuilder_; + /** + * optional .gandiva.types.AndNode andNode = 7; + * @return Whether the andNode field is set. + */ + public boolean hasAndNode() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + * optional .gandiva.types.AndNode andNode = 7; + * @return The andNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode getAndNode() { + if (andNodeBuilder_ == null) { + return andNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.getDefaultInstance() : andNode_; + } else { + return andNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.AndNode andNode = 7; + */ + public Builder setAndNode(org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode value) { + if (andNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + andNode_ = value; + } else { + andNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * optional .gandiva.types.AndNode andNode = 7; + */ + public Builder setAndNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.Builder builderForValue) { + if (andNodeBuilder_ == null) { + andNode_ = builderForValue.build(); + } else { + andNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * optional .gandiva.types.AndNode andNode = 7; + */ + public Builder mergeAndNode(org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode value) { + if (andNodeBuilder_ == null) { + if (((bitField0_ & 0x00000008) != 0) && + andNode_ != null && + andNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.getDefaultInstance()) { + getAndNodeBuilder().mergeFrom(value); + } else { + andNode_ = value; + } + } else { + andNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * optional .gandiva.types.AndNode andNode = 7; + */ + public Builder clearAndNode() { + bitField0_ = (bitField0_ & ~0x00000008); + andNode_ = null; + if (andNodeBuilder_ != null) { + andNodeBuilder_.dispose(); + andNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.AndNode andNode = 7; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.Builder getAndNodeBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getAndNodeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.AndNode andNode = 7; + */ + 
public org.apache.arrow.gandiva.ipc.GandivaTypes.AndNodeOrBuilder getAndNodeOrBuilder() { + if (andNodeBuilder_ != null) { + return andNodeBuilder_.getMessageOrBuilder(); + } else { + return andNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.getDefaultInstance() : andNode_; + } + } + /** + * optional .gandiva.types.AndNode andNode = 7; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode, org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.AndNodeOrBuilder> + getAndNodeFieldBuilder() { + if (andNodeBuilder_ == null) { + andNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode, org.apache.arrow.gandiva.ipc.GandivaTypes.AndNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.AndNodeOrBuilder>( + getAndNode(), + getParentForChildren(), + isClean()); + andNode_ = null; + } + return andNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode orNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode, org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.OrNodeOrBuilder> orNodeBuilder_; + /** + * optional .gandiva.types.OrNode orNode = 8; + * @return Whether the orNode field is set. + */ + public boolean hasOrNode() { + return ((bitField0_ & 0x00000010) != 0); + } + /** + * optional .gandiva.types.OrNode orNode = 8; + * @return The orNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode getOrNode() { + if (orNodeBuilder_ == null) { + return orNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.getDefaultInstance() : orNode_; + } else { + return orNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.OrNode orNode = 8; + */ + public Builder setOrNode(org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode value) { + if (orNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + orNode_ = value; + } else { + orNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * optional .gandiva.types.OrNode orNode = 8; + */ + public Builder setOrNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.Builder builderForValue) { + if (orNodeBuilder_ == null) { + orNode_ = builderForValue.build(); + } else { + orNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * optional .gandiva.types.OrNode orNode = 8; + */ + public Builder mergeOrNode(org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode value) { + if (orNodeBuilder_ == null) { + if (((bitField0_ & 0x00000010) != 0) && + orNode_ != null && + orNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.getDefaultInstance()) { + getOrNodeBuilder().mergeFrom(value); + } else { + orNode_ = value; + } + } else { + orNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * optional .gandiva.types.OrNode orNode = 8; + */ + public Builder clearOrNode() { + bitField0_ = (bitField0_ & ~0x00000010); + orNode_ = null; + if (orNodeBuilder_ != null) { + orNodeBuilder_.dispose(); + orNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.OrNode orNode = 8; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.Builder getOrNodeBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getOrNodeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.OrNode orNode = 8; + */ + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.OrNodeOrBuilder getOrNodeOrBuilder() { + if (orNodeBuilder_ != null) { + return orNodeBuilder_.getMessageOrBuilder(); + } else { + return orNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.getDefaultInstance() : orNode_; + } + } + /** + * optional .gandiva.types.OrNode orNode = 8; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode, org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.OrNodeOrBuilder> + getOrNodeFieldBuilder() { + if (orNodeBuilder_ == null) { + orNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode, org.apache.arrow.gandiva.ipc.GandivaTypes.OrNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.OrNodeOrBuilder>( + getOrNode(), + getParentForChildren(), + isClean()); + orNode_ = null; + } + return orNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode nullNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode, org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.NullNodeOrBuilder> nullNodeBuilder_; + /** + *
+       * literals
+       * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + * @return Whether the nullNode field is set. + */ + public boolean hasNullNode() { + return ((bitField0_ & 0x00000020) != 0); + } + /** + *
+       * literals
+       * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + * @return The nullNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode getNullNode() { + if (nullNodeBuilder_ == null) { + return nullNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.getDefaultInstance() : nullNode_; + } else { + return nullNodeBuilder_.getMessage(); + } + } + /** + *
+       * literals
+       * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + */ + public Builder setNullNode(org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode value) { + if (nullNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + nullNode_ = value; + } else { + nullNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + *
+       * literals
+       * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + */ + public Builder setNullNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.Builder builderForValue) { + if (nullNodeBuilder_ == null) { + nullNode_ = builderForValue.build(); + } else { + nullNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + *
+       * literals
+       * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + */ + public Builder mergeNullNode(org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode value) { + if (nullNodeBuilder_ == null) { + if (((bitField0_ & 0x00000020) != 0) && + nullNode_ != null && + nullNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.getDefaultInstance()) { + getNullNodeBuilder().mergeFrom(value); + } else { + nullNode_ = value; + } + } else { + nullNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + *
+       * literals
+       * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + */ + public Builder clearNullNode() { + bitField0_ = (bitField0_ & ~0x00000020); + nullNode_ = null; + if (nullNodeBuilder_ != null) { + nullNodeBuilder_.dispose(); + nullNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       * literals
+       * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.Builder getNullNodeBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getNullNodeFieldBuilder().getBuilder(); + } + /** + *
+       * literals
+       * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.NullNodeOrBuilder getNullNodeOrBuilder() { + if (nullNodeBuilder_ != null) { + return nullNodeBuilder_.getMessageOrBuilder(); + } else { + return nullNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.getDefaultInstance() : nullNode_; + } + } + /** + *
+       * literals
+       * 
+ * + * optional .gandiva.types.NullNode nullNode = 11; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode, org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.NullNodeOrBuilder> + getNullNodeFieldBuilder() { + if (nullNodeBuilder_ == null) { + nullNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode, org.apache.arrow.gandiva.ipc.GandivaTypes.NullNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.NullNodeOrBuilder>( + getNullNode(), + getParentForChildren(), + isClean()); + nullNode_ = null; + } + return nullNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode intNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder> intNodeBuilder_; + /** + * optional .gandiva.types.IntNode intNode = 12; + * @return Whether the intNode field is set. + */ + public boolean hasIntNode() { + return ((bitField0_ & 0x00000040) != 0); + } + /** + * optional .gandiva.types.IntNode intNode = 12; + * @return The intNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode getIntNode() { + if (intNodeBuilder_ == null) { + return intNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.getDefaultInstance() : intNode_; + } else { + return intNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.IntNode intNode = 12; + */ + public Builder setIntNode(org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode value) { + if (intNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + intNode_ = value; + } else { + intNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * optional .gandiva.types.IntNode intNode = 12; + */ + public Builder setIntNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder builderForValue) { + if (intNodeBuilder_ == null) { + intNode_ = builderForValue.build(); + } else { + intNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * optional .gandiva.types.IntNode intNode = 12; + */ + public Builder mergeIntNode(org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode value) { + if (intNodeBuilder_ == null) { + if (((bitField0_ & 0x00000040) != 0) && + intNode_ != null && + intNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.getDefaultInstance()) { + getIntNodeBuilder().mergeFrom(value); + } else { + intNode_ = value; + } + } else { + intNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * optional .gandiva.types.IntNode intNode = 12; + */ + public Builder clearIntNode() { + bitField0_ = (bitField0_ & ~0x00000040); + intNode_ = null; + if (intNodeBuilder_ != null) { + intNodeBuilder_.dispose(); + intNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.IntNode intNode = 12; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder getIntNodeBuilder() { + bitField0_ |= 0x00000040; + onChanged(); + return getIntNodeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.IntNode intNode = 12; + 
*/ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder getIntNodeOrBuilder() { + if (intNodeBuilder_ != null) { + return intNodeBuilder_.getMessageOrBuilder(); + } else { + return intNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.getDefaultInstance() : intNode_; + } + } + /** + * optional .gandiva.types.IntNode intNode = 12; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder> + getIntNodeFieldBuilder() { + if (intNodeBuilder_ == null) { + intNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder>( + getIntNode(), + getParentForChildren(), + isClean()); + intNode_ = null; + } + return intNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode floatNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder> floatNodeBuilder_; + /** + * optional .gandiva.types.FloatNode floatNode = 13; + * @return Whether the floatNode field is set. + */ + public boolean hasFloatNode() { + return ((bitField0_ & 0x00000080) != 0); + } + /** + * optional .gandiva.types.FloatNode floatNode = 13; + * @return The floatNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode getFloatNode() { + if (floatNodeBuilder_ == null) { + return floatNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.getDefaultInstance() : floatNode_; + } else { + return floatNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.FloatNode floatNode = 13; + */ + public Builder setFloatNode(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode value) { + if (floatNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + floatNode_ = value; + } else { + floatNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FloatNode floatNode = 13; + */ + public Builder setFloatNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder builderForValue) { + if (floatNodeBuilder_ == null) { + floatNode_ = builderForValue.build(); + } else { + floatNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FloatNode floatNode = 13; + */ + public Builder mergeFloatNode(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode value) { + if (floatNodeBuilder_ == null) { + if (((bitField0_ & 0x00000080) != 0) && + floatNode_ != null && + floatNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.getDefaultInstance()) { + getFloatNodeBuilder().mergeFrom(value); + } else { + floatNode_ = value; + } + } else { + floatNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FloatNode floatNode = 13; + */ + public Builder clearFloatNode() { + bitField0_ = (bitField0_ & ~0x00000080); + floatNode_ = null; + if (floatNodeBuilder_ != null) { + floatNodeBuilder_.dispose(); + floatNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.FloatNode floatNode = 13; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder getFloatNodeBuilder() { + bitField0_ |= 0x00000080; + onChanged(); + return 
getFloatNodeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.FloatNode floatNode = 13; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder getFloatNodeOrBuilder() { + if (floatNodeBuilder_ != null) { + return floatNodeBuilder_.getMessageOrBuilder(); + } else { + return floatNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.getDefaultInstance() : floatNode_; + } + } + /** + * optional .gandiva.types.FloatNode floatNode = 13; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder> + getFloatNodeFieldBuilder() { + if (floatNodeBuilder_ == null) { + floatNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder>( + getFloatNode(), + getParentForChildren(), + isClean()); + floatNode_ = null; + } + return floatNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode longNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder> longNodeBuilder_; + /** + * optional .gandiva.types.LongNode longNode = 14; + * @return Whether the longNode field is set. + */ + public boolean hasLongNode() { + return ((bitField0_ & 0x00000100) != 0); + } + /** + * optional .gandiva.types.LongNode longNode = 14; + * @return The longNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode getLongNode() { + if (longNodeBuilder_ == null) { + return longNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.getDefaultInstance() : longNode_; + } else { + return longNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.LongNode longNode = 14; + */ + public Builder setLongNode(org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode value) { + if (longNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + longNode_ = value; + } else { + longNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000100; + onChanged(); + return this; + } + /** + * optional .gandiva.types.LongNode longNode = 14; + */ + public Builder setLongNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder builderForValue) { + if (longNodeBuilder_ == null) { + longNode_ = builderForValue.build(); + } else { + longNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000100; + onChanged(); + return this; + } + /** + * optional .gandiva.types.LongNode longNode = 14; + */ + public Builder mergeLongNode(org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode value) { + if (longNodeBuilder_ == null) { + if (((bitField0_ & 0x00000100) != 0) && + longNode_ != null && + longNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.getDefaultInstance()) { + getLongNodeBuilder().mergeFrom(value); + } else { + longNode_ = value; + } + } else { + longNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000100; + onChanged(); + return this; + } + /** + * optional .gandiva.types.LongNode longNode = 14; + */ + public Builder clearLongNode() { + bitField0_ = (bitField0_ & ~0x00000100); + longNode_ = null; + if (longNodeBuilder_ != null) { + longNodeBuilder_.dispose(); + longNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.LongNode longNode = 14; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder getLongNodeBuilder() { + bitField0_ |= 0x00000100; + onChanged(); + return getLongNodeFieldBuilder().getBuilder(); + } + /** + * 
optional .gandiva.types.LongNode longNode = 14; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder getLongNodeOrBuilder() { + if (longNodeBuilder_ != null) { + return longNodeBuilder_.getMessageOrBuilder(); + } else { + return longNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.getDefaultInstance() : longNode_; + } + } + /** + * optional .gandiva.types.LongNode longNode = 14; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder> + getLongNodeFieldBuilder() { + if (longNodeBuilder_ == null) { + longNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder>( + getLongNode(), + getParentForChildren(), + isClean()); + longNode_ = null; + } + return longNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode booleanNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode, org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNodeOrBuilder> booleanNodeBuilder_; + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + * @return Whether the booleanNode field is set. + */ + public boolean hasBooleanNode() { + return ((bitField0_ & 0x00000200) != 0); + } + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + * @return The booleanNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode getBooleanNode() { + if (booleanNodeBuilder_ == null) { + return booleanNode_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.getDefaultInstance() : booleanNode_; + } else { + return booleanNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + */ + public Builder setBooleanNode(org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode value) { + if (booleanNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + booleanNode_ = value; + } else { + booleanNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000200; + onChanged(); + return this; + } + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + */ + public Builder setBooleanNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.Builder builderForValue) { + if (booleanNodeBuilder_ == null) { + booleanNode_ = builderForValue.build(); + } else { + booleanNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000200; + onChanged(); + return this; + } + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + */ + public Builder mergeBooleanNode(org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode value) { + if (booleanNodeBuilder_ == null) { + if (((bitField0_ & 0x00000200) != 0) && + booleanNode_ != null && + booleanNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.getDefaultInstance()) { + getBooleanNodeBuilder().mergeFrom(value); + } else { + booleanNode_ = value; + } + } else { + booleanNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000200; + onChanged(); + return this; + } + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + */ + public Builder clearBooleanNode() { + bitField0_ = (bitField0_ & ~0x00000200); + booleanNode_ = null; + if (booleanNodeBuilder_ != null) { + booleanNodeBuilder_.dispose(); + booleanNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.Builder 
getBooleanNodeBuilder() { + bitField0_ |= 0x00000200; + onChanged(); + return getBooleanNodeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNodeOrBuilder getBooleanNodeOrBuilder() { + if (booleanNodeBuilder_ != null) { + return booleanNodeBuilder_.getMessageOrBuilder(); + } else { + return booleanNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.getDefaultInstance() : booleanNode_; + } + } + /** + * optional .gandiva.types.BooleanNode booleanNode = 15; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode, org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNodeOrBuilder> + getBooleanNodeFieldBuilder() { + if (booleanNodeBuilder_ == null) { + booleanNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode, org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BooleanNodeOrBuilder>( + getBooleanNode(), + getParentForChildren(), + isClean()); + booleanNode_ = null; + } + return booleanNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode doubleNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder> doubleNodeBuilder_; + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + * @return Whether the doubleNode field is set. + */ + public boolean hasDoubleNode() { + return ((bitField0_ & 0x00000400) != 0); + } + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + * @return The doubleNode. 
+ */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode getDoubleNode() { + if (doubleNodeBuilder_ == null) { + return doubleNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.getDefaultInstance() : doubleNode_; + } else { + return doubleNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + */ + public Builder setDoubleNode(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode value) { + if (doubleNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + doubleNode_ = value; + } else { + doubleNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000400; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + */ + public Builder setDoubleNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder builderForValue) { + if (doubleNodeBuilder_ == null) { + doubleNode_ = builderForValue.build(); + } else { + doubleNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000400; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + */ + public Builder mergeDoubleNode(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode value) { + if (doubleNodeBuilder_ == null) { + if (((bitField0_ & 0x00000400) != 0) && + doubleNode_ != null && + doubleNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.getDefaultInstance()) { + getDoubleNodeBuilder().mergeFrom(value); + } else { + doubleNode_ = value; + } + } else { + doubleNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000400; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + */ + public Builder clearDoubleNode() { + bitField0_ = (bitField0_ & ~0x00000400); + doubleNode_ = null; + if (doubleNodeBuilder_ != null) { + doubleNodeBuilder_.dispose(); + doubleNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.DoubleNode 
doubleNode = 16; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder getDoubleNodeBuilder() { + bitField0_ |= 0x00000400; + onChanged(); + return getDoubleNodeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder getDoubleNodeOrBuilder() { + if (doubleNodeBuilder_ != null) { + return doubleNodeBuilder_.getMessageOrBuilder(); + } else { + return doubleNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.getDefaultInstance() : doubleNode_; + } + } + /** + * optional .gandiva.types.DoubleNode doubleNode = 16; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder> + getDoubleNodeFieldBuilder() { + if (doubleNodeBuilder_ == null) { + doubleNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder>( + getDoubleNode(), + getParentForChildren(), + isClean()); + doubleNode_ = null; + } + return doubleNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode stringNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder> stringNodeBuilder_; + /** + * optional .gandiva.types.StringNode stringNode = 17; + * @return Whether the stringNode field is set. + */ + public boolean hasStringNode() { + return ((bitField0_ & 0x00000800) != 0); + } + /** + * optional .gandiva.types.StringNode stringNode = 17; + * @return The stringNode. 
+ */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode getStringNode() { + if (stringNodeBuilder_ == null) { + return stringNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.getDefaultInstance() : stringNode_; + } else { + return stringNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.StringNode stringNode = 17; + */ + public Builder setStringNode(org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode value) { + if (stringNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + stringNode_ = value; + } else { + stringNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000800; + onChanged(); + return this; + } + /** + * optional .gandiva.types.StringNode stringNode = 17; + */ + public Builder setStringNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder builderForValue) { + if (stringNodeBuilder_ == null) { + stringNode_ = builderForValue.build(); + } else { + stringNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000800; + onChanged(); + return this; + } + /** + * optional .gandiva.types.StringNode stringNode = 17; + */ + public Builder mergeStringNode(org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode value) { + if (stringNodeBuilder_ == null) { + if (((bitField0_ & 0x00000800) != 0) && + stringNode_ != null && + stringNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.getDefaultInstance()) { + getStringNodeBuilder().mergeFrom(value); + } else { + stringNode_ = value; + } + } else { + stringNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000800; + onChanged(); + return this; + } + /** + * optional .gandiva.types.StringNode stringNode = 17; + */ + public Builder clearStringNode() { + bitField0_ = (bitField0_ & ~0x00000800); + stringNode_ = null; + if (stringNodeBuilder_ != null) { + stringNodeBuilder_.dispose(); + stringNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.StringNode 
stringNode = 17; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder getStringNodeBuilder() { + bitField0_ |= 0x00000800; + onChanged(); + return getStringNodeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.StringNode stringNode = 17; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder getStringNodeOrBuilder() { + if (stringNodeBuilder_ != null) { + return stringNodeBuilder_.getMessageOrBuilder(); + } else { + return stringNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.getDefaultInstance() : stringNode_; + } + } + /** + * optional .gandiva.types.StringNode stringNode = 17; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder> + getStringNodeFieldBuilder() { + if (stringNodeBuilder_ == null) { + stringNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder>( + getStringNode(), + getParentForChildren(), + isClean()); + stringNode_ = null; + } + return stringNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode binaryNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder> binaryNodeBuilder_; + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + * @return Whether the binaryNode field is set. + */ + public boolean hasBinaryNode() { + return ((bitField0_ & 0x00001000) != 0); + } + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + * @return The binaryNode. 
+ */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode getBinaryNode() { + if (binaryNodeBuilder_ == null) { + return binaryNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.getDefaultInstance() : binaryNode_; + } else { + return binaryNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + */ + public Builder setBinaryNode(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode value) { + if (binaryNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + binaryNode_ = value; + } else { + binaryNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00001000; + onChanged(); + return this; + } + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + */ + public Builder setBinaryNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder builderForValue) { + if (binaryNodeBuilder_ == null) { + binaryNode_ = builderForValue.build(); + } else { + binaryNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00001000; + onChanged(); + return this; + } + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + */ + public Builder mergeBinaryNode(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode value) { + if (binaryNodeBuilder_ == null) { + if (((bitField0_ & 0x00001000) != 0) && + binaryNode_ != null && + binaryNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.getDefaultInstance()) { + getBinaryNodeBuilder().mergeFrom(value); + } else { + binaryNode_ = value; + } + } else { + binaryNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00001000; + onChanged(); + return this; + } + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + */ + public Builder clearBinaryNode() { + bitField0_ = (bitField0_ & ~0x00001000); + binaryNode_ = null; + if (binaryNodeBuilder_ != null) { + binaryNodeBuilder_.dispose(); + binaryNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.BinaryNode 
binaryNode = 18; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder getBinaryNodeBuilder() { + bitField0_ |= 0x00001000; + onChanged(); + return getBinaryNodeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder getBinaryNodeOrBuilder() { + if (binaryNodeBuilder_ != null) { + return binaryNodeBuilder_.getMessageOrBuilder(); + } else { + return binaryNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.getDefaultInstance() : binaryNode_; + } + } + /** + * optional .gandiva.types.BinaryNode binaryNode = 18; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder> + getBinaryNodeFieldBuilder() { + if (binaryNodeBuilder_ == null) { + binaryNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder>( + getBinaryNode(), + getParentForChildren(), + isClean()); + binaryNode_ = null; + } + return binaryNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode decimalNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder> decimalNodeBuilder_; + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + * @return Whether the decimalNode field is set. + */ + public boolean hasDecimalNode() { + return ((bitField0_ & 0x00002000) != 0); + } + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + * @return The decimalNode. 
+ */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode getDecimalNode() { + if (decimalNodeBuilder_ == null) { + return decimalNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.getDefaultInstance() : decimalNode_; + } else { + return decimalNodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + */ + public Builder setDecimalNode(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode value) { + if (decimalNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + decimalNode_ = value; + } else { + decimalNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00002000; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + */ + public Builder setDecimalNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder builderForValue) { + if (decimalNodeBuilder_ == null) { + decimalNode_ = builderForValue.build(); + } else { + decimalNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00002000; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + */ + public Builder mergeDecimalNode(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode value) { + if (decimalNodeBuilder_ == null) { + if (((bitField0_ & 0x00002000) != 0) && + decimalNode_ != null && + decimalNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.getDefaultInstance()) { + getDecimalNodeBuilder().mergeFrom(value); + } else { + decimalNode_ = value; + } + } else { + decimalNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00002000; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + */ + public Builder clearDecimalNode() { + bitField0_ = (bitField0_ & ~0x00002000); + decimalNode_ = null; + if (decimalNodeBuilder_ != null) { + decimalNodeBuilder_.dispose(); + decimalNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** 
+ * optional .gandiva.types.DecimalNode decimalNode = 19; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder getDecimalNodeBuilder() { + bitField0_ |= 0x00002000; + onChanged(); + return getDecimalNodeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder getDecimalNodeOrBuilder() { + if (decimalNodeBuilder_ != null) { + return decimalNodeBuilder_.getMessageOrBuilder(); + } else { + return decimalNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.getDefaultInstance() : decimalNode_; + } + } + /** + * optional .gandiva.types.DecimalNode decimalNode = 19; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder> + getDecimalNodeFieldBuilder() { + if (decimalNodeBuilder_ == null) { + decimalNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder>( + getDecimalNode(), + getParentForChildren(), + isClean()); + decimalNode_ = null; + } + return decimalNodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.InNode inNode_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.InNode, org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.InNodeOrBuilder> inNodeBuilder_; + /** + *
+       * in expr
+       * 
+ * + * optional .gandiva.types.InNode inNode = 21; + * @return Whether the inNode field is set. + */ + public boolean hasInNode() { + return ((bitField0_ & 0x00004000) != 0); + } + /** + *
+       * in expr
+       * 
+ * + * optional .gandiva.types.InNode inNode = 21; + * @return The inNode. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.InNode getInNode() { + if (inNodeBuilder_ == null) { + return inNode_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.getDefaultInstance() : inNode_; + } else { + return inNodeBuilder_.getMessage(); + } + } + /** + *
+       * in expr
+       * 
+ * + * optional .gandiva.types.InNode inNode = 21; + */ + public Builder setInNode(org.apache.arrow.gandiva.ipc.GandivaTypes.InNode value) { + if (inNodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + inNode_ = value; + } else { + inNodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00004000; + onChanged(); + return this; + } + /** + *
+       * in expr
+       * 
+ * + * optional .gandiva.types.InNode inNode = 21; + */ + public Builder setInNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.Builder builderForValue) { + if (inNodeBuilder_ == null) { + inNode_ = builderForValue.build(); + } else { + inNodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00004000; + onChanged(); + return this; + } + /** + *
+       * in expr
+       * 
+ * + * optional .gandiva.types.InNode inNode = 21; + */ + public Builder mergeInNode(org.apache.arrow.gandiva.ipc.GandivaTypes.InNode value) { + if (inNodeBuilder_ == null) { + if (((bitField0_ & 0x00004000) != 0) && + inNode_ != null && + inNode_ != org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.getDefaultInstance()) { + getInNodeBuilder().mergeFrom(value); + } else { + inNode_ = value; + } + } else { + inNodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00004000; + onChanged(); + return this; + } + /** + *
+       * in expr
+       * 
+ * + * optional .gandiva.types.InNode inNode = 21; + */ + public Builder clearInNode() { + bitField0_ = (bitField0_ & ~0x00004000); + inNode_ = null; + if (inNodeBuilder_ != null) { + inNodeBuilder_.dispose(); + inNodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + *
+       * in expr
+       * 
+ * + * optional .gandiva.types.InNode inNode = 21; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.Builder getInNodeBuilder() { + bitField0_ |= 0x00004000; + onChanged(); + return getInNodeFieldBuilder().getBuilder(); + } + /** + *
+       * in expr
+       * 
+ * + * optional .gandiva.types.InNode inNode = 21; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.InNodeOrBuilder getInNodeOrBuilder() { + if (inNodeBuilder_ != null) { + return inNodeBuilder_.getMessageOrBuilder(); + } else { + return inNode_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.getDefaultInstance() : inNode_; + } + } + /** + *
+       * in expr
+       * 
+ * + * optional .gandiva.types.InNode inNode = 21; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.InNode, org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.InNodeOrBuilder> + getInNodeFieldBuilder() { + if (inNodeBuilder_ == null) { + inNodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.InNode, org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.InNodeOrBuilder>( + getInNode(), + getParentForChildren(), + isClean()); + inNode_ = null; + } + return inNodeBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.TreeNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.TreeNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public TreeNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw 
e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ExpressionRootOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.ExpressionRoot) + com.google.protobuf.MessageOrBuilder { + + /** + * optional .gandiva.types.TreeNode root = 1; + * @return Whether the root field is set. + */ + boolean hasRoot(); + /** + * optional .gandiva.types.TreeNode root = 1; + * @return The root. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getRoot(); + /** + * optional .gandiva.types.TreeNode root = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getRootOrBuilder(); + + /** + * optional .gandiva.types.Field resultType = 2; + * @return Whether the resultType field is set. + */ + boolean hasResultType(); + /** + * optional .gandiva.types.Field resultType = 2; + * @return The resultType. 
+ */ + org.apache.arrow.gandiva.ipc.GandivaTypes.Field getResultType(); + /** + * optional .gandiva.types.Field resultType = 2; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getResultTypeOrBuilder(); + } + /** + * Protobuf type {@code gandiva.types.ExpressionRoot} + */ + public static final class ExpressionRoot extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.ExpressionRoot) + ExpressionRootOrBuilder { + private static final long serialVersionUID = 0L; + // Use ExpressionRoot.newBuilder() to construct. + private ExpressionRoot(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ExpressionRoot() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ExpressionRoot(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExpressionRoot_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExpressionRoot_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.class, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.Builder.class); + } + + private int bitField0_; + public static final int ROOT_FIELD_NUMBER = 1; + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode root_; + /** + * optional .gandiva.types.TreeNode root = 1; + * @return Whether the root field is set. + */ + @java.lang.Override + public boolean hasRoot() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.TreeNode root = 1; + * @return The root. 
+ */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getRoot() { + return root_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : root_; + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getRootOrBuilder() { + return root_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : root_; + } + + public static final int RESULTTYPE_FIELD_NUMBER = 2; + private org.apache.arrow.gandiva.ipc.GandivaTypes.Field resultType_; + /** + * optional .gandiva.types.Field resultType = 2; + * @return Whether the resultType field is set. + */ + @java.lang.Override + public boolean hasResultType() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.Field resultType = 2; + * @return The resultType. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field getResultType() { + return resultType_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance() : resultType_; + } + /** + * optional .gandiva.types.Field resultType = 2; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getResultTypeOrBuilder() { + return resultType_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance() : resultType_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(1, getRoot()); + } + if (((bitField0_ & 0x00000002) != 0)) { + output.writeMessage(2, getResultType()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getRoot()); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getResultType()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot other = (org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot) obj; + + if (hasRoot() != other.hasRoot()) return false; + if (hasRoot()) { + if (!getRoot() + .equals(other.getRoot())) return false; + } + if (hasResultType() != other.hasResultType()) return false; + if (hasResultType()) { + if (!getResultType() + .equals(other.getResultType())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int 
hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasRoot()) { + hash = (37 * hash) + ROOT_FIELD_NUMBER; + hash = (53 * hash) + getRoot().hashCode(); + } + if (hasResultType()) { + hash = (37 * hash) + RESULTTYPE_FIELD_NUMBER; + hash = (53 * hash) + getResultType().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public 
static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot prototype) { + 
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.ExpressionRoot} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.ExpressionRoot) + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRootOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExpressionRoot_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExpressionRoot_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.class, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getRootFieldBuilder(); + getResultTypeFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + root_ = null; + if (rootBuilder_ != null) { + rootBuilder_.dispose(); + rootBuilder_ = 
null; + } + resultType_ = null; + if (resultTypeBuilder_ != null) { + resultTypeBuilder_.dispose(); + resultTypeBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExpressionRoot_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot result = new org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.root_ = rootBuilder_ == null + ? root_ + : rootBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.resultType_ = resultTypeBuilder_ == null + ? 
resultType_ + : resultTypeBuilder_.build(); + to_bitField0_ |= 0x00000002; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.getDefaultInstance()) return this; + if (other.hasRoot()) { + mergeRoot(other.getRoot()); + } + if (other.hasResultType()) { + mergeResultType(other.getResultType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getRootFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + input.readMessage( + getResultTypeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + 
private int bitField0_; + + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode root_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> rootBuilder_; + /** + * optional .gandiva.types.TreeNode root = 1; + * @return Whether the root field is set. + */ + public boolean hasRoot() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.TreeNode root = 1; + * @return The root. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getRoot() { + if (rootBuilder_ == null) { + return root_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : root_; + } else { + return rootBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public Builder setRoot(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (rootBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + root_ = value; + } else { + rootBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public Builder setRoot( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (rootBuilder_ == null) { + root_ = builderForValue.build(); + } else { + rootBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public Builder mergeRoot(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (rootBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + root_ != null && + root_ != org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()) { + getRootBuilder().mergeFrom(value); + } else { + root_ = value; + } + } else { + 
rootBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public Builder clearRoot() { + bitField0_ = (bitField0_ & ~0x00000001); + root_ = null; + if (rootBuilder_ != null) { + rootBuilder_.dispose(); + rootBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder getRootBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRootFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getRootOrBuilder() { + if (rootBuilder_ != null) { + return rootBuilder_.getMessageOrBuilder(); + } else { + return root_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : root_; + } + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> + getRootFieldBuilder() { + if (rootBuilder_ == null) { + rootBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder>( + getRoot(), + getParentForChildren(), + isClean()); + root_ = null; + } + return rootBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.Field resultType_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.Field, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder> resultTypeBuilder_; + /** + * optional .gandiva.types.Field resultType = 2; + * 
@return Whether the resultType field is set. + */ + public boolean hasResultType() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.Field resultType = 2; + * @return The resultType. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field getResultType() { + if (resultTypeBuilder_ == null) { + return resultType_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance() : resultType_; + } else { + return resultTypeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.Field resultType = 2; + */ + public Builder setResultType(org.apache.arrow.gandiva.ipc.GandivaTypes.Field value) { + if (resultTypeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + resultType_ = value; + } else { + resultTypeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.Field resultType = 2; + */ + public Builder setResultType( + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder builderForValue) { + if (resultTypeBuilder_ == null) { + resultType_ = builderForValue.build(); + } else { + resultTypeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.Field resultType = 2; + */ + public Builder mergeResultType(org.apache.arrow.gandiva.ipc.GandivaTypes.Field value) { + if (resultTypeBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) && + resultType_ != null && + resultType_ != org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance()) { + getResultTypeBuilder().mergeFrom(value); + } else { + resultType_ = value; + } + } else { + resultTypeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.Field resultType = 2; + */ + public Builder clearResultType() { + bitField0_ = (bitField0_ & ~0x00000002); + resultType_ = null; + if 
(resultTypeBuilder_ != null) { + resultTypeBuilder_.dispose(); + resultTypeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.Field resultType = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder getResultTypeBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getResultTypeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.Field resultType = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getResultTypeOrBuilder() { + if (resultTypeBuilder_ != null) { + return resultTypeBuilder_.getMessageOrBuilder(); + } else { + return resultType_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance() : resultType_; + } + } + /** + * optional .gandiva.types.Field resultType = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.Field, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder> + getResultTypeFieldBuilder() { + if (resultTypeBuilder_ == null) { + resultTypeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.Field, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder>( + getResultType(), + getParentForChildren(), + isClean()); + resultType_ = null; + } + return resultTypeBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.ExpressionRoot) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.ExpressionRoot) + private static final 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ExpressionRoot parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ExpressionListOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.ExpressionList) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + java.util.List + getExprsList(); + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot getExprs(int index); + /** + * repeated 
.gandiva.types.ExpressionRoot exprs = 2; + */ + int getExprsCount(); + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + java.util.List + getExprsOrBuilderList(); + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRootOrBuilder getExprsOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.ExpressionList} + */ + public static final class ExpressionList extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.ExpressionList) + ExpressionListOrBuilder { + private static final long serialVersionUID = 0L; + // Use ExpressionList.newBuilder() to construct. + private ExpressionList(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ExpressionList() { + exprs_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new ExpressionList(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExpressionList_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExpressionList_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList.class, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList.Builder.class); + } + + public static final int EXPRS_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private java.util.List exprs_; + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + @java.lang.Override + public java.util.List getExprsList() { + return exprs_; + } + /** + * repeated 
.gandiva.types.ExpressionRoot exprs = 2; + */ + @java.lang.Override + public java.util.List + getExprsOrBuilderList() { + return exprs_; + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + @java.lang.Override + public int getExprsCount() { + return exprs_.size(); + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot getExprs(int index) { + return exprs_.get(index); + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRootOrBuilder getExprsOrBuilder( + int index) { + return exprs_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < exprs_.size(); i++) { + output.writeMessage(2, exprs_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < exprs_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, exprs_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList other = (org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList) obj; + + if 
(!getExprsList() + .equals(other.getExprsList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getExprsCount() > 0) { + hash = (37 * hash) + EXPRS_FIELD_NUMBER; + hash = (53 * hash) + getExprsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return 
DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.ExpressionList} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.ExpressionList) + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionListOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExpressionList_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExpressionList_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList.class, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (exprsBuilder_ == null) { + exprs_ = java.util.Collections.emptyList(); + } else { + exprs_ = null; + exprsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + 
@java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_ExpressionList_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList result = new org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList result) { + if (exprsBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + exprs_ = java.util.Collections.unmodifiableList(exprs_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.exprs_ = exprs_; + } else { + result.exprs_ = exprsBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList 
other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList.getDefaultInstance()) return this; + if (exprsBuilder_ == null) { + if (!other.exprs_.isEmpty()) { + if (exprs_.isEmpty()) { + exprs_ = other.exprs_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureExprsIsMutable(); + exprs_.addAll(other.exprs_); + } + onChanged(); + } + } else { + if (!other.exprs_.isEmpty()) { + if (exprsBuilder_.isEmpty()) { + exprsBuilder_.dispose(); + exprsBuilder_ = null; + exprs_ = other.exprs_; + bitField0_ = (bitField0_ & ~0x00000001); + exprsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getExprsFieldBuilder() : null; + } else { + exprsBuilder_.addAllMessages(other.exprs_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 18: { + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.parser(), + extensionRegistry); + if (exprsBuilder_ == null) { + ensureExprsIsMutable(); + exprs_.add(m); + } else { + exprsBuilder_.addMessage(m); + } + break; + } // case 18 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; 
+ } + private int bitField0_; + + private java.util.List exprs_ = + java.util.Collections.emptyList(); + private void ensureExprsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + exprs_ = new java.util.ArrayList(exprs_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRootOrBuilder> exprsBuilder_; + + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public java.util.List getExprsList() { + if (exprsBuilder_ == null) { + return java.util.Collections.unmodifiableList(exprs_); + } else { + return exprsBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public int getExprsCount() { + if (exprsBuilder_ == null) { + return exprs_.size(); + } else { + return exprsBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot getExprs(int index) { + if (exprsBuilder_ == null) { + return exprs_.get(index); + } else { + return exprsBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public Builder setExprs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot value) { + if (exprsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureExprsIsMutable(); + exprs_.set(index, value); + onChanged(); + } else { + exprsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public Builder setExprs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.Builder builderForValue) { + if (exprsBuilder_ == null) { + ensureExprsIsMutable(); + exprs_.set(index, builderForValue.build()); + onChanged(); + } else { + 
exprsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public Builder addExprs(org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot value) { + if (exprsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureExprsIsMutable(); + exprs_.add(value); + onChanged(); + } else { + exprsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public Builder addExprs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot value) { + if (exprsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureExprsIsMutable(); + exprs_.add(index, value); + onChanged(); + } else { + exprsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public Builder addExprs( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.Builder builderForValue) { + if (exprsBuilder_ == null) { + ensureExprsIsMutable(); + exprs_.add(builderForValue.build()); + onChanged(); + } else { + exprsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public Builder addExprs( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.Builder builderForValue) { + if (exprsBuilder_ == null) { + ensureExprsIsMutable(); + exprs_.add(index, builderForValue.build()); + onChanged(); + } else { + exprsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public Builder addAllExprs( + java.lang.Iterable values) { + if (exprsBuilder_ == null) { + ensureExprsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, exprs_); + onChanged(); + } else { + exprsBuilder_.addAllMessages(values); + } + return this; + } 
+ /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public Builder clearExprs() { + if (exprsBuilder_ == null) { + exprs_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + exprsBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public Builder removeExprs(int index) { + if (exprsBuilder_ == null) { + ensureExprsIsMutable(); + exprs_.remove(index); + onChanged(); + } else { + exprsBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.Builder getExprsBuilder( + int index) { + return getExprsFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRootOrBuilder getExprsOrBuilder( + int index) { + if (exprsBuilder_ == null) { + return exprs_.get(index); } else { + return exprsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public java.util.List + getExprsOrBuilderList() { + if (exprsBuilder_ != null) { + return exprsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(exprs_); + } + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.Builder addExprsBuilder() { + return getExprsFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.getDefaultInstance()); + } + /** + * repeated .gandiva.types.ExpressionRoot exprs = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.Builder addExprsBuilder( + int index) { + return getExprsFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.getDefaultInstance()); + } + /** + * repeated 
.gandiva.types.ExpressionRoot exprs = 2; + */ + public java.util.List + getExprsBuilderList() { + return getExprsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRootOrBuilder> + getExprsFieldBuilder() { + if (exprsBuilder_ == null) { + exprsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRoot.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionRootOrBuilder>( + exprs_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + exprs_ = null; + } + return exprsBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.ExpressionList) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.ExpressionList) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ExpressionList parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExpressionList getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface ConditionOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.Condition) + com.google.protobuf.MessageOrBuilder { + + /** + * optional .gandiva.types.TreeNode root = 1; + * @return Whether the root field is set. + */ + boolean hasRoot(); + /** + * optional .gandiva.types.TreeNode root = 1; + * @return The root. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getRoot(); + /** + * optional .gandiva.types.TreeNode root = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getRootOrBuilder(); + } + /** + * Protobuf type {@code gandiva.types.Condition} + */ + public static final class Condition extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.Condition) + ConditionOrBuilder { + private static final long serialVersionUID = 0L; + // Use Condition.newBuilder() to construct. 
+ private Condition(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Condition() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Condition(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Condition_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Condition_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.Condition.class, org.apache.arrow.gandiva.ipc.GandivaTypes.Condition.Builder.class); + } + + private int bitField0_; + public static final int ROOT_FIELD_NUMBER = 1; + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode root_; + /** + * optional .gandiva.types.TreeNode root = 1; + * @return Whether the root field is set. + */ + @java.lang.Override + public boolean hasRoot() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.TreeNode root = 1; + * @return The root. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getRoot() { + return root_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : root_; + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getRootOrBuilder() { + return root_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : root_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(1, getRoot()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getRoot()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.Condition)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.Condition other = (org.apache.arrow.gandiva.ipc.GandivaTypes.Condition) obj; + + if (hasRoot() != other.hasRoot()) return false; + if (hasRoot()) { + if (!getRoot() + .equals(other.getRoot())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasRoot()) { + hash = (37 * hash) + ROOT_FIELD_NUMBER; + hash = (53 * hash) + getRoot().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } 
+ + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.Condition prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.Condition} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.Condition) + org.apache.arrow.gandiva.ipc.GandivaTypes.ConditionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Condition_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Condition_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.Condition.class, org.apache.arrow.gandiva.ipc.GandivaTypes.Condition.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.Condition.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getRootFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + root_ = null; + if (rootBuilder_ != null) { + rootBuilder_.dispose(); + rootBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Condition_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Condition getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.Condition.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Condition build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.Condition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Condition buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.Condition result = new org.apache.arrow.gandiva.ipc.GandivaTypes.Condition(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.Condition result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.root_ = rootBuilder_ == null + ? 
root_ + : rootBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.Condition) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.Condition)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.Condition other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.Condition.getDefaultInstance()) return this; + if (other.hasRoot()) { + mergeRoot(other.getRoot()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getRootFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode root_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> rootBuilder_; + /** + * optional .gandiva.types.TreeNode root = 1; + * @return Whether the root field is set. + */ + public boolean hasRoot() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.TreeNode root = 1; + * @return The root. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getRoot() { + if (rootBuilder_ == null) { + return root_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : root_; + } else { + return rootBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public Builder setRoot(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (rootBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + root_ = value; + } else { + rootBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public Builder setRoot( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (rootBuilder_ == null) { + root_ = builderForValue.build(); + } else { + rootBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public Builder mergeRoot(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (rootBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + root_ != null && + root_ != org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()) { + getRootBuilder().mergeFrom(value); + } else { + root_ = value; + } + } else { + rootBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public Builder clearRoot() { + bitField0_ = (bitField0_ & ~0x00000001); + root_ = null; + if (rootBuilder_ != null) { + 
rootBuilder_.dispose(); + rootBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder getRootBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRootFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getRootOrBuilder() { + if (rootBuilder_ != null) { + return rootBuilder_.getMessageOrBuilder(); + } else { + return root_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : root_; + } + } + /** + * optional .gandiva.types.TreeNode root = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> + getRootFieldBuilder() { + if (rootBuilder_ == null) { + rootBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder>( + getRoot(), + getParentForChildren(), + isClean()); + root_ = null; + } + return rootBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.Condition) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.Condition) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.Condition DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new 
org.apache.arrow.gandiva.ipc.GandivaTypes.Condition(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Condition getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Condition parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Condition getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface SchemaOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.Schema) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.Field columns = 1; + */ + java.util.List + getColumnsList(); + /** + * repeated .gandiva.types.Field columns = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.Field getColumns(int index); + /** + * repeated .gandiva.types.Field columns = 1; + */ + int getColumnsCount(); + /** + * repeated .gandiva.types.Field columns = 1; + */ + java.util.List + getColumnsOrBuilderList(); + /** + * 
repeated .gandiva.types.Field columns = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getColumnsOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.Schema} + */ + public static final class Schema extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.Schema) + SchemaOrBuilder { + private static final long serialVersionUID = 0L; + // Use Schema.newBuilder() to construct. + private Schema(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Schema() { + columns_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Schema(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Schema_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Schema_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.Schema.class, org.apache.arrow.gandiva.ipc.GandivaTypes.Schema.Builder.class); + } + + public static final int COLUMNS_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List columns_; + /** + * repeated .gandiva.types.Field columns = 1; + */ + @java.lang.Override + public java.util.List getColumnsList() { + return columns_; + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + @java.lang.Override + public java.util.List + getColumnsOrBuilderList() { + return columns_; + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + @java.lang.Override + public int getColumnsCount() { + return columns_.size(); + } + /** + * repeated 
.gandiva.types.Field columns = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field getColumns(int index) { + return columns_.get(index); + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getColumnsOrBuilder( + int index) { + return columns_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < columns_.size(); i++) { + output.writeMessage(1, columns_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < columns_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, columns_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.Schema)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.Schema other = (org.apache.arrow.gandiva.ipc.GandivaTypes.Schema) obj; + + if (!getColumnsList() + .equals(other.getColumnsList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if 
(getColumnsCount() > 0) { + hash = (37 * hash) + COLUMNS_FIELD_NUMBER; + hash = (53 * hash) + getColumnsList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseFrom( + 
java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.Schema prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.Schema} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.Schema) + org.apache.arrow.gandiva.ipc.GandivaTypes.SchemaOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Schema_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Schema_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.Schema.class, org.apache.arrow.gandiva.ipc.GandivaTypes.Schema.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.Schema.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (columnsBuilder_ == null) { + columns_ = java.util.Collections.emptyList(); + } else { + columns_ = null; + columnsBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_Schema_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Schema getDefaultInstanceForType() { + return 
org.apache.arrow.gandiva.ipc.GandivaTypes.Schema.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Schema build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.Schema result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Schema buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.Schema result = new org.apache.arrow.gandiva.ipc.GandivaTypes.Schema(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.Schema result) { + if (columnsBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + columns_ = java.util.Collections.unmodifiableList(columns_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.columns_ = columns_; + } else { + result.columns_ = columnsBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.Schema result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.Schema) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.Schema)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.Schema other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.Schema.getDefaultInstance()) return this; + if (columnsBuilder_ == null) { + if (!other.columns_.isEmpty()) { + if (columns_.isEmpty()) { + columns_ = other.columns_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureColumnsIsMutable(); + columns_.addAll(other.columns_); + } + onChanged(); + } + } else { + if (!other.columns_.isEmpty()) 
{ + if (columnsBuilder_.isEmpty()) { + columnsBuilder_.dispose(); + columnsBuilder_ = null; + columns_ = other.columns_; + bitField0_ = (bitField0_ & ~0x00000001); + columnsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getColumnsFieldBuilder() : null; + } else { + columnsBuilder_.addAllMessages(other.columns_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.Field m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.parser(), + extensionRegistry); + if (columnsBuilder_ == null) { + ensureColumnsIsMutable(); + columns_.add(m); + } else { + columnsBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.util.List columns_ = + java.util.Collections.emptyList(); + private void ensureColumnsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + columns_ = new java.util.ArrayList(columns_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.Field, 
org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder> columnsBuilder_; + + /** + * repeated .gandiva.types.Field columns = 1; + */ + public java.util.List getColumnsList() { + if (columnsBuilder_ == null) { + return java.util.Collections.unmodifiableList(columns_); + } else { + return columnsBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public int getColumnsCount() { + if (columnsBuilder_ == null) { + return columns_.size(); + } else { + return columnsBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field getColumns(int index) { + if (columnsBuilder_ == null) { + return columns_.get(index); + } else { + return columnsBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public Builder setColumns( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.Field value) { + if (columnsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnsIsMutable(); + columns_.set(index, value); + onChanged(); + } else { + columnsBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public Builder setColumns( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder builderForValue) { + if (columnsBuilder_ == null) { + ensureColumnsIsMutable(); + columns_.set(index, builderForValue.build()); + onChanged(); + } else { + columnsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public Builder addColumns(org.apache.arrow.gandiva.ipc.GandivaTypes.Field value) { + if (columnsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnsIsMutable(); + columns_.add(value); + onChanged(); + } else { + 
columnsBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public Builder addColumns( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.Field value) { + if (columnsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnsIsMutable(); + columns_.add(index, value); + onChanged(); + } else { + columnsBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public Builder addColumns( + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder builderForValue) { + if (columnsBuilder_ == null) { + ensureColumnsIsMutable(); + columns_.add(builderForValue.build()); + onChanged(); + } else { + columnsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public Builder addColumns( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder builderForValue) { + if (columnsBuilder_ == null) { + ensureColumnsIsMutable(); + columns_.add(index, builderForValue.build()); + onChanged(); + } else { + columnsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public Builder addAllColumns( + java.lang.Iterable values) { + if (columnsBuilder_ == null) { + ensureColumnsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, columns_); + onChanged(); + } else { + columnsBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public Builder clearColumns() { + if (columnsBuilder_ == null) { + columns_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + columnsBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public Builder removeColumns(int index) { + if (columnsBuilder_ == 
null) { + ensureColumnsIsMutable(); + columns_.remove(index); + onChanged(); + } else { + columnsBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder getColumnsBuilder( + int index) { + return getColumnsFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder getColumnsOrBuilder( + int index) { + if (columnsBuilder_ == null) { + return columns_.get(index); } else { + return columnsBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public java.util.List + getColumnsOrBuilderList() { + if (columnsBuilder_ != null) { + return columnsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(columns_); + } + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder addColumnsBuilder() { + return getColumnsFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance()); + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder addColumnsBuilder( + int index) { + return getColumnsFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.getDefaultInstance()); + } + /** + * repeated .gandiva.types.Field columns = 1; + */ + public java.util.List + getColumnsBuilderList() { + return getColumnsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.Field, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder> + getColumnsFieldBuilder() { + if (columnsBuilder_ == null) { + columnsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + 
org.apache.arrow.gandiva.ipc.GandivaTypes.Field, org.apache.arrow.gandiva.ipc.GandivaTypes.Field.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FieldOrBuilder>( + columns_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + columns_ = null; + } + return columnsBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.Schema) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.Schema) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.Schema DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.Schema(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.Schema getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Schema parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + 
} + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.Schema getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface GandivaDataTypesOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.GandivaDataTypes) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + java.util.List + getDataTypeList(); + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getDataType(int index); + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + int getDataTypeCount(); + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + java.util.List + getDataTypeOrBuilderList(); + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getDataTypeOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.GandivaDataTypes} + */ + public static final class GandivaDataTypes extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.GandivaDataTypes) + GandivaDataTypesOrBuilder { + private static final long serialVersionUID = 0L; + // Use GandivaDataTypes.newBuilder() to construct. 
+ private GandivaDataTypes(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GandivaDataTypes() { + dataType_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new GandivaDataTypes(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_GandivaDataTypes_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_GandivaDataTypes_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes.class, org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes.Builder.class); + } + + public static final int DATATYPE_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List dataType_; + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + @java.lang.Override + public java.util.List getDataTypeList() { + return dataType_; + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + @java.lang.Override + public java.util.List + getDataTypeOrBuilderList() { + return dataType_; + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + @java.lang.Override + public int getDataTypeCount() { + return dataType_.size(); + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getDataType(int index) { + return dataType_.get(index); + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getDataTypeOrBuilder( + 
int index) { + return dataType_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < dataType_.size(); i++) { + output.writeMessage(1, dataType_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < dataType_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, dataType_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes other = (org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes) obj; + + if (!getDataTypeList() + .equals(other.getDataTypeList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getDataTypeCount() > 0) { + hash = (37 * hash) + DATATYPE_FIELD_NUMBER; + hash = (53 * hash) + getDataTypeList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes 
parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + 
+ public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.GandivaDataTypes} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.GandivaDataTypes) + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypesOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_GandivaDataTypes_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_GandivaDataTypes_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes.class, org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (dataTypeBuilder_ == null) { + dataType_ = java.util.Collections.emptyList(); + } else { + dataType_ = null; + dataTypeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_GandivaDataTypes_descriptor; + } + + @java.lang.Override + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes result = new org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes result) { + if (dataTypeBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + dataType_ = java.util.Collections.unmodifiableList(dataType_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.dataType_ = dataType_; + } else { + result.dataType_ = dataTypeBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes.getDefaultInstance()) return this; + if (dataTypeBuilder_ == null) { + if (!other.dataType_.isEmpty()) { + if 
(dataType_.isEmpty()) { + dataType_ = other.dataType_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureDataTypeIsMutable(); + dataType_.addAll(other.dataType_); + } + onChanged(); + } + } else { + if (!other.dataType_.isEmpty()) { + if (dataTypeBuilder_.isEmpty()) { + dataTypeBuilder_.dispose(); + dataTypeBuilder_ = null; + dataType_ = other.dataType_; + bitField0_ = (bitField0_ & ~0x00000001); + dataTypeBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getDataTypeFieldBuilder() : null; + } else { + dataTypeBuilder_.addAllMessages(other.dataType_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.parser(), + extensionRegistry); + if (dataTypeBuilder_ == null) { + ensureDataTypeIsMutable(); + dataType_.add(m); + } else { + dataTypeBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.util.List dataType_ = + java.util.Collections.emptyList(); + private void 
ensureDataTypeIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + dataType_ = new java.util.ArrayList(dataType_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> dataTypeBuilder_; + + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public java.util.List getDataTypeList() { + if (dataTypeBuilder_ == null) { + return java.util.Collections.unmodifiableList(dataType_); + } else { + return dataTypeBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public int getDataTypeCount() { + if (dataTypeBuilder_ == null) { + return dataType_.size(); + } else { + return dataTypeBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getDataType(int index) { + if (dataTypeBuilder_ == null) { + return dataType_.get(index); + } else { + return dataTypeBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public Builder setDataType( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (dataTypeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDataTypeIsMutable(); + dataType_.set(index, value); + onChanged(); + } else { + dataTypeBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public Builder setDataType( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder builderForValue) { + if (dataTypeBuilder_ == null) { + ensureDataTypeIsMutable(); + dataType_.set(index, builderForValue.build()); + onChanged(); + } else { + 
dataTypeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public Builder addDataType(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (dataTypeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDataTypeIsMutable(); + dataType_.add(value); + onChanged(); + } else { + dataTypeBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public Builder addDataType( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (dataTypeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDataTypeIsMutable(); + dataType_.add(index, value); + onChanged(); + } else { + dataTypeBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public Builder addDataType( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder builderForValue) { + if (dataTypeBuilder_ == null) { + ensureDataTypeIsMutable(); + dataType_.add(builderForValue.build()); + onChanged(); + } else { + dataTypeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public Builder addDataType( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder builderForValue) { + if (dataTypeBuilder_ == null) { + ensureDataTypeIsMutable(); + dataType_.add(index, builderForValue.build()); + onChanged(); + } else { + dataTypeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public Builder addAllDataType( + java.lang.Iterable values) { + if (dataTypeBuilder_ == null) { + ensureDataTypeIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, dataType_); 
+ onChanged(); + } else { + dataTypeBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public Builder clearDataType() { + if (dataTypeBuilder_ == null) { + dataType_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + dataTypeBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public Builder removeDataType(int index) { + if (dataTypeBuilder_ == null) { + ensureDataTypeIsMutable(); + dataType_.remove(index); + onChanged(); + } else { + dataTypeBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder getDataTypeBuilder( + int index) { + return getDataTypeFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getDataTypeOrBuilder( + int index) { + if (dataTypeBuilder_ == null) { + return dataType_.get(index); } else { + return dataTypeBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public java.util.List + getDataTypeOrBuilderList() { + if (dataTypeBuilder_ != null) { + return dataTypeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(dataType_); + } + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder addDataTypeBuilder() { + return getDataTypeFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance()); + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder addDataTypeBuilder( + int index) { 
+ return getDataTypeFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance()); + } + /** + * repeated .gandiva.types.ExtGandivaType dataType = 1; + */ + public java.util.List + getDataTypeBuilderList() { + return getDataTypeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> + getDataTypeFieldBuilder() { + if (dataTypeBuilder_ == null) { + dataTypeBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder>( + dataType_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + dataType_ = null; + } + return dataTypeBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.GandivaDataTypes) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.GandivaDataTypes) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + 
@java.lang.Override + public GandivaDataTypes parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaDataTypes getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface GandivaFunctionsOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.GandivaFunctions) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + java.util.List + getFunctionList(); + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature getFunction(int index); + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + int getFunctionCount(); + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + java.util.List + getFunctionOrBuilderList(); + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignatureOrBuilder getFunctionOrBuilder( + int index); + } + /** + * Protobuf 
type {@code gandiva.types.GandivaFunctions} + */ + public static final class GandivaFunctions extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.GandivaFunctions) + GandivaFunctionsOrBuilder { + private static final long serialVersionUID = 0L; + // Use GandivaFunctions.newBuilder() to construct. + private GandivaFunctions(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GandivaFunctions() { + function_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new GandivaFunctions(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_GandivaFunctions_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_GandivaFunctions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions.class, org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions.Builder.class); + } + + public static final int FUNCTION_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List function_; + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + @java.lang.Override + public java.util.List getFunctionList() { + return function_; + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + @java.lang.Override + public java.util.List + getFunctionOrBuilderList() { + return function_; + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + @java.lang.Override + public int getFunctionCount() { + return function_.size(); + } + /** + * repeated 
.gandiva.types.FunctionSignature function = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature getFunction(int index) { + return function_.get(index); + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignatureOrBuilder getFunctionOrBuilder( + int index) { + return function_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < function_.size(); i++) { + output.writeMessage(1, function_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < function_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, function_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions other = (org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions) obj; + + if (!getFunctionList() + .equals(other.getFunctionList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return 
memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getFunctionCount() > 0) { + hash = (37 * hash) + FUNCTION_FIELD_NUMBER; + hash = (53 * hash) + getFunctionList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseFrom(java.io.InputStream input) + throws java.io.IOException { + return 
com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == 
DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.GandivaFunctions} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.GandivaFunctions) + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctionsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_GandivaFunctions_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_GandivaFunctions_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions.class, org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (functionBuilder_ == null) { + function_ = java.util.Collections.emptyList(); + } else { + function_ = null; + functionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_GandivaFunctions_descriptor; + } + + @java.lang.Override + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions result = new org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions result) { + if (functionBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + function_ = java.util.Collections.unmodifiableList(function_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.function_ = function_; + } else { + result.function_ = functionBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions.getDefaultInstance()) return this; + if (functionBuilder_ == null) { + if (!other.function_.isEmpty()) { + if 
(function_.isEmpty()) { + function_ = other.function_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureFunctionIsMutable(); + function_.addAll(other.function_); + } + onChanged(); + } + } else { + if (!other.function_.isEmpty()) { + if (functionBuilder_.isEmpty()) { + functionBuilder_.dispose(); + functionBuilder_ = null; + function_ = other.function_; + bitField0_ = (bitField0_ & ~0x00000001); + functionBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getFunctionFieldBuilder() : null; + } else { + functionBuilder_.addAllMessages(other.function_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.parser(), + extensionRegistry); + if (functionBuilder_ == null) { + ensureFunctionIsMutable(); + function_.add(m); + } else { + functionBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.util.List function_ = + java.util.Collections.emptyList(); + private 
void ensureFunctionIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + function_ = new java.util.ArrayList(function_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignatureOrBuilder> functionBuilder_; + + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public java.util.List getFunctionList() { + if (functionBuilder_ == null) { + return java.util.Collections.unmodifiableList(function_); + } else { + return functionBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public int getFunctionCount() { + if (functionBuilder_ == null) { + return function_.size(); + } else { + return functionBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature getFunction(int index) { + if (functionBuilder_ == null) { + return function_.get(index); + } else { + return functionBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public Builder setFunction( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature value) { + if (functionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFunctionIsMutable(); + function_.set(index, value); + onChanged(); + } else { + functionBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public Builder setFunction( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.Builder builderForValue) { + if (functionBuilder_ == null) { + ensureFunctionIsMutable(); + function_.set(index, builderForValue.build()); + onChanged(); + } else 
{ + functionBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public Builder addFunction(org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature value) { + if (functionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFunctionIsMutable(); + function_.add(value); + onChanged(); + } else { + functionBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public Builder addFunction( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature value) { + if (functionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFunctionIsMutable(); + function_.add(index, value); + onChanged(); + } else { + functionBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public Builder addFunction( + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.Builder builderForValue) { + if (functionBuilder_ == null) { + ensureFunctionIsMutable(); + function_.add(builderForValue.build()); + onChanged(); + } else { + functionBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public Builder addFunction( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.Builder builderForValue) { + if (functionBuilder_ == null) { + ensureFunctionIsMutable(); + function_.add(index, builderForValue.build()); + onChanged(); + } else { + functionBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public Builder addAllFunction( + java.lang.Iterable values) { + if (functionBuilder_ == null) { + ensureFunctionIsMutable(); + 
com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, function_); + onChanged(); + } else { + functionBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public Builder clearFunction() { + if (functionBuilder_ == null) { + function_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + functionBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public Builder removeFunction(int index) { + if (functionBuilder_ == null) { + ensureFunctionIsMutable(); + function_.remove(index); + onChanged(); + } else { + functionBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.Builder getFunctionBuilder( + int index) { + return getFunctionFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignatureOrBuilder getFunctionOrBuilder( + int index) { + if (functionBuilder_ == null) { + return function_.get(index); } else { + return functionBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public java.util.List + getFunctionOrBuilderList() { + if (functionBuilder_ != null) { + return functionBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(function_); + } + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.Builder addFunctionBuilder() { + return getFunctionFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.getDefaultInstance()); + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + 
*/ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.Builder addFunctionBuilder( + int index) { + return getFunctionFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.getDefaultInstance()); + } + /** + * repeated .gandiva.types.FunctionSignature function = 1; + */ + public java.util.List + getFunctionBuilderList() { + return getFunctionFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignatureOrBuilder> + getFunctionFieldBuilder() { + if (functionBuilder_ == null) { + functionBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignatureOrBuilder>( + function_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + function_ = null; + } + return functionBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.GandivaFunctions) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.GandivaFunctions) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions getDefaultInstance() { + 
return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GandivaFunctions parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.GandivaFunctions getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface FunctionSignatureOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.FunctionSignature) + com.google.protobuf.MessageOrBuilder { + + /** + * optional string name = 1; + * @return Whether the name field is set. + */ + boolean hasName(); + /** + * optional string name = 1; + * @return The name. + */ + java.lang.String getName(); + /** + * optional string name = 1; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + * @return Whether the returnType field is set. 
+ */ + boolean hasReturnType(); + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + * @return The returnType. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getReturnType(); + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getReturnTypeOrBuilder(); + + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + java.util.List + getParamTypesList(); + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getParamTypes(int index); + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + int getParamTypesCount(); + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + java.util.List + getParamTypesOrBuilderList(); + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getParamTypesOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.FunctionSignature} + */ + public static final class FunctionSignature extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.FunctionSignature) + FunctionSignatureOrBuilder { + private static final long serialVersionUID = 0L; + // Use FunctionSignature.newBuilder() to construct. 
+ private FunctionSignature(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private FunctionSignature() { + name_ = ""; + paramTypes_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new FunctionSignature(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FunctionSignature_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FunctionSignature_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.class, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.Builder.class); + } + + private int bitField0_; + public static final int NAME_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * optional string name = 1; + * @return Whether the name field is set. + */ + @java.lang.Override + public boolean hasName() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional string name = 1; + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * optional string name = 1; + * @return The bytes for name. 
+ */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int RETURNTYPE_FIELD_NUMBER = 2; + private org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType returnType_; + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + * @return Whether the returnType field is set. + */ + @java.lang.Override + public boolean hasReturnType() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + * @return The returnType. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getReturnType() { + return returnType_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getReturnTypeOrBuilder() { + return returnType_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } + + public static final int PARAMTYPES_FIELD_NUMBER = 3; + @SuppressWarnings("serial") + private java.util.List paramTypes_; + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + @java.lang.Override + public java.util.List getParamTypesList() { + return paramTypes_; + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + @java.lang.Override + public java.util.List + getParamTypesOrBuilderList() { + return paramTypes_; + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + @java.lang.Override + public int getParamTypesCount() { + return paramTypes_.size(); + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getParamTypes(int index) { + return paramTypes_.get(index); + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getParamTypesOrBuilder( + int index) { + return paramTypes_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); + } + if (((bitField0_ & 0x00000002) != 0)) { + output.writeMessage(2, getReturnType()); + } + for (int i = 0; i < paramTypes_.size(); i++) { + output.writeMessage(3, paramTypes_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { 
+ int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getReturnType()); + } + for (int i = 0; i < paramTypes_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, paramTypes_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature other = (org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature) obj; + + if (hasName() != other.hasName()) return false; + if (hasName()) { + if (!getName() + .equals(other.getName())) return false; + } + if (hasReturnType() != other.hasReturnType()) return false; + if (hasReturnType()) { + if (!getReturnType() + .equals(other.getReturnType())) return false; + } + if (!getParamTypesList() + .equals(other.getParamTypesList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasReturnType()) { + hash = (37 * hash) + RETURNTYPE_FIELD_NUMBER; + hash = (53 * hash) + getReturnType().hashCode(); + } + if (getParamTypesCount() > 0) { + hash = (37 * hash) + PARAMTYPES_FIELD_NUMBER; + hash = (53 * hash) + getParamTypesList().hashCode(); + } + hash = (29 * hash) + 
getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.FunctionSignature} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.FunctionSignature) + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignatureOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FunctionSignature_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FunctionSignature_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.class, org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getReturnTypeFieldBuilder(); + getParamTypesFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + name_ = ""; + returnType_ = null; + if (returnTypeBuilder_ != null) { + returnTypeBuilder_.dispose(); + returnTypeBuilder_ = null; + } + if (paramTypesBuilder_ == null) { + paramTypes_ = 
java.util.Collections.emptyList(); + } else { + paramTypes_ = null; + paramTypesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FunctionSignature_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature result = new org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature result) { + if (paramTypesBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0)) { + paramTypes_ = java.util.Collections.unmodifiableList(paramTypes_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.paramTypes_ = paramTypes_; + } else { + result.paramTypes_ = paramTypesBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature result) { + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.name_ = name_; + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) 
{ + result.returnType_ = returnTypeBuilder_ == null + ? returnType_ + : returnTypeBuilder_.build(); + to_bitField0_ |= 0x00000002; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature.getDefaultInstance()) return this; + if (other.hasName()) { + name_ = other.name_; + bitField0_ |= 0x00000001; + onChanged(); + } + if (other.hasReturnType()) { + mergeReturnType(other.getReturnType()); + } + if (paramTypesBuilder_ == null) { + if (!other.paramTypes_.isEmpty()) { + if (paramTypes_.isEmpty()) { + paramTypes_ = other.paramTypes_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureParamTypesIsMutable(); + paramTypes_.addAll(other.paramTypes_); + } + onChanged(); + } + } else { + if (!other.paramTypes_.isEmpty()) { + if (paramTypesBuilder_.isEmpty()) { + paramTypesBuilder_.dispose(); + paramTypesBuilder_ = null; + paramTypes_ = other.paramTypes_; + bitField0_ = (bitField0_ & ~0x00000004); + paramTypesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getParamTypesFieldBuilder() : null; + } else { + paramTypesBuilder_.addAllMessages(other.paramTypes_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + input.readMessage( + getReturnTypeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.parser(), + extensionRegistry); + if (paramTypesBuilder_ == null) { + ensureParamTypesIsMutable(); + paramTypes_.add(m); + } else { + paramTypesBuilder_.addMessage(m); + } + break; + } // case 26 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.lang.Object name_ = ""; + /** + * optional string name = 1; + * @return Whether the name field is set. + */ + public boolean hasName() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional string name = 1; + * @return The name. 
+ */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * optional string name = 1; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * optional string name = 1; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional string name = 1; + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + * optional string name = 1; + * @param value The bytes for name to set. + * @return This builder for chaining. 
+ */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType returnType_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> returnTypeBuilder_; + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + * @return Whether the returnType field is set. + */ + public boolean hasReturnType() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + * @return The returnType. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getReturnType() { + if (returnTypeBuilder_ == null) { + return returnType_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } else { + return returnTypeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + */ + public Builder setReturnType(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (returnTypeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + returnType_ = value; + } else { + returnTypeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + */ + public Builder setReturnType( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder builderForValue) { + if (returnTypeBuilder_ == null) { + returnType_ = builderForValue.build(); + } else { + returnTypeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + */ + public Builder mergeReturnType(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (returnTypeBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) && + returnType_ != null && + returnType_ != org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance()) { + getReturnTypeBuilder().mergeFrom(value); + } else { + returnType_ = value; + } + } else { + returnTypeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + */ + public Builder clearReturnType() { + bitField0_ = (bitField0_ & ~0x00000002); + returnType_ = null; + if (returnTypeBuilder_ != null) { + returnTypeBuilder_.dispose(); + returnTypeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder 
getReturnTypeBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getReturnTypeFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getReturnTypeOrBuilder() { + if (returnTypeBuilder_ != null) { + return returnTypeBuilder_.getMessageOrBuilder(); + } else { + return returnType_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance() : returnType_; + } + } + /** + * optional .gandiva.types.ExtGandivaType returnType = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> + getReturnTypeFieldBuilder() { + if (returnTypeBuilder_ == null) { + returnTypeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder>( + getReturnType(), + getParentForChildren(), + isClean()); + returnType_ = null; + } + return returnTypeBuilder_; + } + + private java.util.List paramTypes_ = + java.util.Collections.emptyList(); + private void ensureParamTypesIsMutable() { + if (!((bitField0_ & 0x00000004) != 0)) { + paramTypes_ = new java.util.ArrayList(paramTypes_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> paramTypesBuilder_; + + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public java.util.List getParamTypesList() { + if (paramTypesBuilder_ == null) { + return 
java.util.Collections.unmodifiableList(paramTypes_); + } else { + return paramTypesBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public int getParamTypesCount() { + if (paramTypesBuilder_ == null) { + return paramTypes_.size(); + } else { + return paramTypesBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType getParamTypes(int index) { + if (paramTypesBuilder_ == null) { + return paramTypes_.get(index); + } else { + return paramTypesBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public Builder setParamTypes( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (paramTypesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParamTypesIsMutable(); + paramTypes_.set(index, value); + onChanged(); + } else { + paramTypesBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public Builder setParamTypes( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder builderForValue) { + if (paramTypesBuilder_ == null) { + ensureParamTypesIsMutable(); + paramTypes_.set(index, builderForValue.build()); + onChanged(); + } else { + paramTypesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public Builder addParamTypes(org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (paramTypesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParamTypesIsMutable(); + paramTypes_.add(value); + onChanged(); + } else { + paramTypesBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ 
+ public Builder addParamTypes( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType value) { + if (paramTypesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParamTypesIsMutable(); + paramTypes_.add(index, value); + onChanged(); + } else { + paramTypesBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public Builder addParamTypes( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder builderForValue) { + if (paramTypesBuilder_ == null) { + ensureParamTypesIsMutable(); + paramTypes_.add(builderForValue.build()); + onChanged(); + } else { + paramTypesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public Builder addParamTypes( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder builderForValue) { + if (paramTypesBuilder_ == null) { + ensureParamTypesIsMutable(); + paramTypes_.add(index, builderForValue.build()); + onChanged(); + } else { + paramTypesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public Builder addAllParamTypes( + java.lang.Iterable values) { + if (paramTypesBuilder_ == null) { + ensureParamTypesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, paramTypes_); + onChanged(); + } else { + paramTypesBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public Builder clearParamTypes() { + if (paramTypesBuilder_ == null) { + paramTypes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + paramTypesBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public Builder 
removeParamTypes(int index) { + if (paramTypesBuilder_ == null) { + ensureParamTypesIsMutable(); + paramTypes_.remove(index); + onChanged(); + } else { + paramTypesBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder getParamTypesBuilder( + int index) { + return getParamTypesFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder getParamTypesOrBuilder( + int index) { + if (paramTypesBuilder_ == null) { + return paramTypes_.get(index); } else { + return paramTypesBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public java.util.List + getParamTypesOrBuilderList() { + if (paramTypesBuilder_ != null) { + return paramTypesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(paramTypes_); + } + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder addParamTypesBuilder() { + return getParamTypesFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance()); + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder addParamTypesBuilder( + int index) { + return getParamTypesFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.getDefaultInstance()); + } + /** + * repeated .gandiva.types.ExtGandivaType paramTypes = 3; + */ + public java.util.List + getParamTypesBuilderList() { + return getParamTypesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + 
org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder> + getParamTypesFieldBuilder() { + if (paramTypesBuilder_ == null) { + paramTypesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaType.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.ExtGandivaTypeOrBuilder>( + paramTypes_, + ((bitField0_ & 0x00000004) != 0), + getParentForChildren(), + isClean()); + paramTypes_ = null; + } + return paramTypesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.FunctionSignature) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.FunctionSignature) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public FunctionSignature parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FunctionSignature getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface InNodeOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.InNode) + com.google.protobuf.MessageOrBuilder { + + /** + * optional .gandiva.types.TreeNode node = 1; + * @return Whether the node field is set. + */ + boolean hasNode(); + /** + * optional .gandiva.types.TreeNode node = 1; + * @return The node. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getNode(); + /** + * optional .gandiva.types.TreeNode node = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getNodeOrBuilder(); + + /** + * optional .gandiva.types.IntConstants intValues = 2; + * @return Whether the intValues field is set. + */ + boolean hasIntValues(); + /** + * optional .gandiva.types.IntConstants intValues = 2; + * @return The intValues. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants getIntValues(); + /** + * optional .gandiva.types.IntConstants intValues = 2; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstantsOrBuilder getIntValuesOrBuilder(); + + /** + * optional .gandiva.types.LongConstants longValues = 3; + * @return Whether the longValues field is set. 
+ */ + boolean hasLongValues(); + /** + * optional .gandiva.types.LongConstants longValues = 3; + * @return The longValues. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants getLongValues(); + /** + * optional .gandiva.types.LongConstants longValues = 3; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstantsOrBuilder getLongValuesOrBuilder(); + + /** + * optional .gandiva.types.StringConstants stringValues = 4; + * @return Whether the stringValues field is set. + */ + boolean hasStringValues(); + /** + * optional .gandiva.types.StringConstants stringValues = 4; + * @return The stringValues. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants getStringValues(); + /** + * optional .gandiva.types.StringConstants stringValues = 4; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstantsOrBuilder getStringValuesOrBuilder(); + + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + * @return Whether the binaryValues field is set. + */ + boolean hasBinaryValues(); + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + * @return The binaryValues. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants getBinaryValues(); + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstantsOrBuilder getBinaryValuesOrBuilder(); + + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + * @return Whether the decimalValues field is set. + */ + boolean hasDecimalValues(); + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + * @return The decimalValues. 
+ */ + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants getDecimalValues(); + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstantsOrBuilder getDecimalValuesOrBuilder(); + + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + * @return Whether the floatValues field is set. + */ + boolean hasFloatValues(); + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + * @return The floatValues. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants getFloatValues(); + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstantsOrBuilder getFloatValuesOrBuilder(); + + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + * @return Whether the doubleValues field is set. + */ + boolean hasDoubleValues(); + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + * @return The doubleValues. + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants getDoubleValues(); + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstantsOrBuilder getDoubleValuesOrBuilder(); + } + /** + * Protobuf type {@code gandiva.types.InNode} + */ + public static final class InNode extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.InNode) + InNodeOrBuilder { + private static final long serialVersionUID = 0L; + // Use InNode.newBuilder() to construct. 
+ private InNode(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private InNode() { + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new InNode(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_InNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_InNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.Builder.class); + } + + private int bitField0_; + public static final int NODE_FIELD_NUMBER = 1; + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode node_; + /** + * optional .gandiva.types.TreeNode node = 1; + * @return Whether the node field is set. + */ + @java.lang.Override + public boolean hasNode() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.TreeNode node = 1; + * @return The node. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getNode() { + return node_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : node_; + } + /** + * optional .gandiva.types.TreeNode node = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getNodeOrBuilder() { + return node_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : node_; + } + + public static final int INTVALUES_FIELD_NUMBER = 2; + private org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants intValues_; + /** + * optional .gandiva.types.IntConstants intValues = 2; + * @return Whether the intValues field is set. + */ + @java.lang.Override + public boolean hasIntValues() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.IntConstants intValues = 2; + * @return The intValues. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants getIntValues() { + return intValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.getDefaultInstance() : intValues_; + } + /** + * optional .gandiva.types.IntConstants intValues = 2; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstantsOrBuilder getIntValuesOrBuilder() { + return intValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.getDefaultInstance() : intValues_; + } + + public static final int LONGVALUES_FIELD_NUMBER = 3; + private org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants longValues_; + /** + * optional .gandiva.types.LongConstants longValues = 3; + * @return Whether the longValues field is set. + */ + @java.lang.Override + public boolean hasLongValues() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * optional .gandiva.types.LongConstants longValues = 3; + * @return The longValues. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants getLongValues() { + return longValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.getDefaultInstance() : longValues_; + } + /** + * optional .gandiva.types.LongConstants longValues = 3; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstantsOrBuilder getLongValuesOrBuilder() { + return longValues_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.getDefaultInstance() : longValues_; + } + + public static final int STRINGVALUES_FIELD_NUMBER = 4; + private org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants stringValues_; + /** + * optional .gandiva.types.StringConstants stringValues = 4; + * @return Whether the stringValues field is set. + */ + @java.lang.Override + public boolean hasStringValues() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + * optional .gandiva.types.StringConstants stringValues = 4; + * @return The stringValues. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants getStringValues() { + return stringValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.getDefaultInstance() : stringValues_; + } + /** + * optional .gandiva.types.StringConstants stringValues = 4; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstantsOrBuilder getStringValuesOrBuilder() { + return stringValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.getDefaultInstance() : stringValues_; + } + + public static final int BINARYVALUES_FIELD_NUMBER = 5; + private org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants binaryValues_; + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + * @return Whether the binaryValues field is set. + */ + @java.lang.Override + public boolean hasBinaryValues() { + return ((bitField0_ & 0x00000010) != 0); + } + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + * @return The binaryValues. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants getBinaryValues() { + return binaryValues_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.getDefaultInstance() : binaryValues_; + } + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstantsOrBuilder getBinaryValuesOrBuilder() { + return binaryValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.getDefaultInstance() : binaryValues_; + } + + public static final int DECIMALVALUES_FIELD_NUMBER = 6; + private org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants decimalValues_; + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + * @return Whether the decimalValues field is set. + */ + @java.lang.Override + public boolean hasDecimalValues() { + return ((bitField0_ & 0x00000020) != 0); + } + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + * @return The decimalValues. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants getDecimalValues() { + return decimalValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.getDefaultInstance() : decimalValues_; + } + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstantsOrBuilder getDecimalValuesOrBuilder() { + return decimalValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.getDefaultInstance() : decimalValues_; + } + + public static final int FLOATVALUES_FIELD_NUMBER = 7; + private org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants floatValues_; + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + * @return Whether the floatValues field is set. + */ + @java.lang.Override + public boolean hasFloatValues() { + return ((bitField0_ & 0x00000040) != 0); + } + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + * @return The floatValues. 
+ */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants getFloatValues() { + return floatValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.getDefaultInstance() : floatValues_; + } + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstantsOrBuilder getFloatValuesOrBuilder() { + return floatValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.getDefaultInstance() : floatValues_; + } + + public static final int DOUBLEVALUES_FIELD_NUMBER = 8; + private org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants doubleValues_; + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + * @return Whether the doubleValues field is set. + */ + @java.lang.Override + public boolean hasDoubleValues() { + return ((bitField0_ & 0x00000080) != 0); + } + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + * @return The doubleValues. + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants getDoubleValues() { + return doubleValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.getDefaultInstance() : doubleValues_; + } + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstantsOrBuilder getDoubleValuesOrBuilder() { + return doubleValues_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.getDefaultInstance() : doubleValues_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) != 0)) { + output.writeMessage(1, getNode()); + } + if (((bitField0_ & 0x00000002) != 0)) { + output.writeMessage(2, getIntValues()); + } + if (((bitField0_ & 0x00000004) != 0)) { + output.writeMessage(3, getLongValues()); + } + if (((bitField0_ & 0x00000008) != 0)) { + output.writeMessage(4, getStringValues()); + } + if (((bitField0_ & 0x00000010) != 0)) { + output.writeMessage(5, getBinaryValues()); + } + if (((bitField0_ & 0x00000020) != 0)) { + output.writeMessage(6, getDecimalValues()); + } + if (((bitField0_ & 0x00000040) != 0)) { + output.writeMessage(7, getFloatValues()); + } + if (((bitField0_ & 0x00000080) != 0)) { + output.writeMessage(8, getDoubleValues()); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getNode()); + } + if (((bitField0_ & 0x00000002) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getIntValues()); + } + if (((bitField0_ & 0x00000004) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getLongValues()); + } + if (((bitField0_ & 0x00000008) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getStringValues()); + } + if (((bitField0_ & 0x00000010) != 0)) { + size += 
com.google.protobuf.CodedOutputStream + .computeMessageSize(5, getBinaryValues()); + } + if (((bitField0_ & 0x00000020) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, getDecimalValues()); + } + if (((bitField0_ & 0x00000040) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, getFloatValues()); + } + if (((bitField0_ & 0x00000080) != 0)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(8, getDoubleValues()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.InNode)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.InNode other = (org.apache.arrow.gandiva.ipc.GandivaTypes.InNode) obj; + + if (hasNode() != other.hasNode()) return false; + if (hasNode()) { + if (!getNode() + .equals(other.getNode())) return false; + } + if (hasIntValues() != other.hasIntValues()) return false; + if (hasIntValues()) { + if (!getIntValues() + .equals(other.getIntValues())) return false; + } + if (hasLongValues() != other.hasLongValues()) return false; + if (hasLongValues()) { + if (!getLongValues() + .equals(other.getLongValues())) return false; + } + if (hasStringValues() != other.hasStringValues()) return false; + if (hasStringValues()) { + if (!getStringValues() + .equals(other.getStringValues())) return false; + } + if (hasBinaryValues() != other.hasBinaryValues()) return false; + if (hasBinaryValues()) { + if (!getBinaryValues() + .equals(other.getBinaryValues())) return false; + } + if (hasDecimalValues() != other.hasDecimalValues()) return false; + if (hasDecimalValues()) { + if (!getDecimalValues() + .equals(other.getDecimalValues())) return false; + } + if (hasFloatValues() != other.hasFloatValues()) return false; + if (hasFloatValues()) 
{ + if (!getFloatValues() + .equals(other.getFloatValues())) return false; + } + if (hasDoubleValues() != other.hasDoubleValues()) return false; + if (hasDoubleValues()) { + if (!getDoubleValues() + .equals(other.getDoubleValues())) return false; + } + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasNode()) { + hash = (37 * hash) + NODE_FIELD_NUMBER; + hash = (53 * hash) + getNode().hashCode(); + } + if (hasIntValues()) { + hash = (37 * hash) + INTVALUES_FIELD_NUMBER; + hash = (53 * hash) + getIntValues().hashCode(); + } + if (hasLongValues()) { + hash = (37 * hash) + LONGVALUES_FIELD_NUMBER; + hash = (53 * hash) + getLongValues().hashCode(); + } + if (hasStringValues()) { + hash = (37 * hash) + STRINGVALUES_FIELD_NUMBER; + hash = (53 * hash) + getStringValues().hashCode(); + } + if (hasBinaryValues()) { + hash = (37 * hash) + BINARYVALUES_FIELD_NUMBER; + hash = (53 * hash) + getBinaryValues().hashCode(); + } + if (hasDecimalValues()) { + hash = (37 * hash) + DECIMALVALUES_FIELD_NUMBER; + hash = (53 * hash) + getDecimalValues().hashCode(); + } + if (hasFloatValues()) { + hash = (37 * hash) + FLOATVALUES_FIELD_NUMBER; + hash = (53 * hash) + getFloatValues().hashCode(); + } + if (hasDoubleValues()) { + hash = (37 * hash) + DOUBLEVALUES_FIELD_NUMBER; + hash = (53 * hash) + getDoubleValues().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode parseFrom( + java.nio.ByteBuffer data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode 
parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.InNode prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.InNode} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.InNode) + org.apache.arrow.gandiva.ipc.GandivaTypes.InNodeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_InNode_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_InNode_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.class, org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getNodeFieldBuilder(); + getIntValuesFieldBuilder(); + getLongValuesFieldBuilder(); + getStringValuesFieldBuilder(); + getBinaryValuesFieldBuilder(); + getDecimalValuesFieldBuilder(); + getFloatValuesFieldBuilder(); + getDoubleValuesFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + node_ = null; + if (nodeBuilder_ != null) { + nodeBuilder_.dispose(); + nodeBuilder_ = null; + } + intValues_ = 
null; + if (intValuesBuilder_ != null) { + intValuesBuilder_.dispose(); + intValuesBuilder_ = null; + } + longValues_ = null; + if (longValuesBuilder_ != null) { + longValuesBuilder_.dispose(); + longValuesBuilder_ = null; + } + stringValues_ = null; + if (stringValuesBuilder_ != null) { + stringValuesBuilder_.dispose(); + stringValuesBuilder_ = null; + } + binaryValues_ = null; + if (binaryValuesBuilder_ != null) { + binaryValuesBuilder_.dispose(); + binaryValuesBuilder_ = null; + } + decimalValues_ = null; + if (decimalValuesBuilder_ != null) { + decimalValuesBuilder_.dispose(); + decimalValuesBuilder_ = null; + } + floatValues_ = null; + if (floatValuesBuilder_ != null) { + floatValuesBuilder_.dispose(); + floatValuesBuilder_ = null; + } + doubleValues_ = null; + if (doubleValuesBuilder_ != null) { + doubleValuesBuilder_.dispose(); + doubleValuesBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_InNode_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.InNode getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.InNode build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.InNode result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.InNode buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.InNode result = new org.apache.arrow.gandiva.ipc.GandivaTypes.InNode(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.InNode result) { + int from_bitField0_ = 
bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.node_ = nodeBuilder_ == null + ? node_ + : nodeBuilder_.build(); + to_bitField0_ |= 0x00000001; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.intValues_ = intValuesBuilder_ == null + ? intValues_ + : intValuesBuilder_.build(); + to_bitField0_ |= 0x00000002; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.longValues_ = longValuesBuilder_ == null + ? longValues_ + : longValuesBuilder_.build(); + to_bitField0_ |= 0x00000004; + } + if (((from_bitField0_ & 0x00000008) != 0)) { + result.stringValues_ = stringValuesBuilder_ == null + ? stringValues_ + : stringValuesBuilder_.build(); + to_bitField0_ |= 0x00000008; + } + if (((from_bitField0_ & 0x00000010) != 0)) { + result.binaryValues_ = binaryValuesBuilder_ == null + ? binaryValues_ + : binaryValuesBuilder_.build(); + to_bitField0_ |= 0x00000010; + } + if (((from_bitField0_ & 0x00000020) != 0)) { + result.decimalValues_ = decimalValuesBuilder_ == null + ? decimalValues_ + : decimalValuesBuilder_.build(); + to_bitField0_ |= 0x00000020; + } + if (((from_bitField0_ & 0x00000040) != 0)) { + result.floatValues_ = floatValuesBuilder_ == null + ? floatValues_ + : floatValuesBuilder_.build(); + to_bitField0_ |= 0x00000040; + } + if (((from_bitField0_ & 0x00000080) != 0)) { + result.doubleValues_ = doubleValuesBuilder_ == null + ? 
doubleValues_ + : doubleValuesBuilder_.build(); + to_bitField0_ |= 0x00000080; + } + result.bitField0_ |= to_bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.InNode) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.InNode)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.InNode other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.InNode.getDefaultInstance()) return this; + if (other.hasNode()) { + mergeNode(other.getNode()); + } + if (other.hasIntValues()) { + mergeIntValues(other.getIntValues()); + } + if (other.hasLongValues()) { + mergeLongValues(other.getLongValues()); + } + if (other.hasStringValues()) { + mergeStringValues(other.getStringValues()); + } + if (other.hasBinaryValues()) { + mergeBinaryValues(other.getBinaryValues()); + } + if (other.hasDecimalValues()) { + mergeDecimalValues(other.getDecimalValues()); + } + if (other.hasFloatValues()) { + mergeFloatValues(other.getFloatValues()); + } + if (other.hasDoubleValues()) { + mergeDoubleValues(other.getDoubleValues()); + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + input.readMessage( + getNodeFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000001; + break; + } // case 10 + case 18: { + input.readMessage( + 
getIntValuesFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 26: { + input.readMessage( + getLongValuesFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000004; + break; + } // case 26 + case 34: { + input.readMessage( + getStringValuesFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000008; + break; + } // case 34 + case 42: { + input.readMessage( + getBinaryValuesFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000010; + break; + } // case 42 + case 50: { + input.readMessage( + getDecimalValuesFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000020; + break; + } // case 50 + case 58: { + input.readMessage( + getFloatValuesFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000040; + break; + } // case 58 + case 66: { + input.readMessage( + getDoubleValuesFieldBuilder().getBuilder(), + extensionRegistry); + bitField0_ |= 0x00000080; + break; + } // case 66 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode node_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> nodeBuilder_; + /** + * optional .gandiva.types.TreeNode node = 1; + * @return Whether the node field is set. + */ + public boolean hasNode() { + return ((bitField0_ & 0x00000001) != 0); + } + /** + * optional .gandiva.types.TreeNode node = 1; + * @return The node. 
+ */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode getNode() { + if (nodeBuilder_ == null) { + return node_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : node_; + } else { + return nodeBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.TreeNode node = 1; + */ + public Builder setNode(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (nodeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + node_ = value; + } else { + nodeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode node = 1; + */ + public Builder setNode( + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder builderForValue) { + if (nodeBuilder_ == null) { + node_ = builderForValue.build(); + } else { + nodeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode node = 1; + */ + public Builder mergeNode(org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode value) { + if (nodeBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0) && + node_ != null && + node_ != org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance()) { + getNodeBuilder().mergeFrom(value); + } else { + node_ = value; + } + } else { + nodeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode node = 1; + */ + public Builder clearNode() { + bitField0_ = (bitField0_ & ~0x00000001); + node_ = null; + if (nodeBuilder_ != null) { + nodeBuilder_.dispose(); + nodeBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.TreeNode node = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder getNodeBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getNodeFieldBuilder().getBuilder(); + } + /** 
+ * optional .gandiva.types.TreeNode node = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder getNodeOrBuilder() { + if (nodeBuilder_ != null) { + return nodeBuilder_.getMessageOrBuilder(); + } else { + return node_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.getDefaultInstance() : node_; + } + } + /** + * optional .gandiva.types.TreeNode node = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder> + getNodeFieldBuilder() { + if (nodeBuilder_ == null) { + nodeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.TreeNodeOrBuilder>( + getNode(), + getParentForChildren(), + isClean()); + node_ = null; + } + return nodeBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants intValues_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstantsOrBuilder> intValuesBuilder_; + /** + * optional .gandiva.types.IntConstants intValues = 2; + * @return Whether the intValues field is set. + */ + public boolean hasIntValues() { + return ((bitField0_ & 0x00000002) != 0); + } + /** + * optional .gandiva.types.IntConstants intValues = 2; + * @return The intValues. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants getIntValues() { + if (intValuesBuilder_ == null) { + return intValues_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.getDefaultInstance() : intValues_; + } else { + return intValuesBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.IntConstants intValues = 2; + */ + public Builder setIntValues(org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants value) { + if (intValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + intValues_ = value; + } else { + intValuesBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.IntConstants intValues = 2; + */ + public Builder setIntValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.Builder builderForValue) { + if (intValuesBuilder_ == null) { + intValues_ = builderForValue.build(); + } else { + intValuesBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.IntConstants intValues = 2; + */ + public Builder mergeIntValues(org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants value) { + if (intValuesBuilder_ == null) { + if (((bitField0_ & 0x00000002) != 0) && + intValues_ != null && + intValues_ != org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.getDefaultInstance()) { + getIntValuesBuilder().mergeFrom(value); + } else { + intValues_ = value; + } + } else { + intValuesBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * optional .gandiva.types.IntConstants intValues = 2; + */ + public Builder clearIntValues() { + bitField0_ = (bitField0_ & ~0x00000002); + intValues_ = null; + if (intValuesBuilder_ != null) { + intValuesBuilder_.dispose(); + intValuesBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.IntConstants intValues = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.Builder getIntValuesBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + 
return getIntValuesFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.IntConstants intValues = 2; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstantsOrBuilder getIntValuesOrBuilder() { + if (intValuesBuilder_ != null) { + return intValuesBuilder_.getMessageOrBuilder(); + } else { + return intValues_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.getDefaultInstance() : intValues_; + } + } + /** + * optional .gandiva.types.IntConstants intValues = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstantsOrBuilder> + getIntValuesFieldBuilder() { + if (intValuesBuilder_ == null) { + intValuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstantsOrBuilder>( + getIntValues(), + getParentForChildren(), + isClean()); + intValues_ = null; + } + return intValuesBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants longValues_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstantsOrBuilder> longValuesBuilder_; + /** + * optional .gandiva.types.LongConstants longValues = 3; + * @return Whether the longValues field is set. + */ + public boolean hasLongValues() { + return ((bitField0_ & 0x00000004) != 0); + } + /** + * optional .gandiva.types.LongConstants longValues = 3; + * @return The longValues. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants getLongValues() { + if (longValuesBuilder_ == null) { + return longValues_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.getDefaultInstance() : longValues_; + } else { + return longValuesBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.LongConstants longValues = 3; + */ + public Builder setLongValues(org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants value) { + if (longValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + longValues_ = value; + } else { + longValuesBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * optional .gandiva.types.LongConstants longValues = 3; + */ + public Builder setLongValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.Builder builderForValue) { + if (longValuesBuilder_ == null) { + longValues_ = builderForValue.build(); + } else { + longValuesBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * optional .gandiva.types.LongConstants longValues = 3; + */ + public Builder mergeLongValues(org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants value) { + if (longValuesBuilder_ == null) { + if (((bitField0_ & 0x00000004) != 0) && + longValues_ != null && + longValues_ != org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.getDefaultInstance()) { + getLongValuesBuilder().mergeFrom(value); + } else { + longValues_ = value; + } + } else { + longValuesBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * optional .gandiva.types.LongConstants longValues = 3; + */ + public Builder clearLongValues() { + bitField0_ = (bitField0_ & ~0x00000004); + longValues_ = null; + if (longValuesBuilder_ != null) { + longValuesBuilder_.dispose(); + longValuesBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.LongConstants longValues = 3; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.Builder getLongValuesBuilder() { + 
bitField0_ |= 0x00000004; + onChanged(); + return getLongValuesFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.LongConstants longValues = 3; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstantsOrBuilder getLongValuesOrBuilder() { + if (longValuesBuilder_ != null) { + return longValuesBuilder_.getMessageOrBuilder(); + } else { + return longValues_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.getDefaultInstance() : longValues_; + } + } + /** + * optional .gandiva.types.LongConstants longValues = 3; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstantsOrBuilder> + getLongValuesFieldBuilder() { + if (longValuesBuilder_ == null) { + longValuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstantsOrBuilder>( + getLongValues(), + getParentForChildren(), + isClean()); + longValues_ = null; + } + return longValuesBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants stringValues_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstantsOrBuilder> stringValuesBuilder_; + /** + * optional .gandiva.types.StringConstants stringValues = 4; + * @return Whether the stringValues field is set. + */ + public boolean hasStringValues() { + return ((bitField0_ & 0x00000008) != 0); + } + /** + * optional .gandiva.types.StringConstants stringValues = 4; + * @return The stringValues. 
+ */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants getStringValues() { + if (stringValuesBuilder_ == null) { + return stringValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.getDefaultInstance() : stringValues_; + } else { + return stringValuesBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.StringConstants stringValues = 4; + */ + public Builder setStringValues(org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants value) { + if (stringValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + stringValues_ = value; + } else { + stringValuesBuilder_.setMessage(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * optional .gandiva.types.StringConstants stringValues = 4; + */ + public Builder setStringValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.Builder builderForValue) { + if (stringValuesBuilder_ == null) { + stringValues_ = builderForValue.build(); + } else { + stringValuesBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * optional .gandiva.types.StringConstants stringValues = 4; + */ + public Builder mergeStringValues(org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants value) { + if (stringValuesBuilder_ == null) { + if (((bitField0_ & 0x00000008) != 0) && + stringValues_ != null && + stringValues_ != org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.getDefaultInstance()) { + getStringValuesBuilder().mergeFrom(value); + } else { + stringValues_ = value; + } + } else { + stringValuesBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000008; + onChanged(); + return this; + } + /** + * optional .gandiva.types.StringConstants stringValues = 4; + */ + public Builder clearStringValues() { + bitField0_ = (bitField0_ & ~0x00000008); + stringValues_ = null; + if (stringValuesBuilder_ != null) { + stringValuesBuilder_.dispose(); + 
stringValuesBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.StringConstants stringValues = 4; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.Builder getStringValuesBuilder() { + bitField0_ |= 0x00000008; + onChanged(); + return getStringValuesFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.StringConstants stringValues = 4; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstantsOrBuilder getStringValuesOrBuilder() { + if (stringValuesBuilder_ != null) { + return stringValuesBuilder_.getMessageOrBuilder(); + } else { + return stringValues_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.getDefaultInstance() : stringValues_; + } + } + /** + * optional .gandiva.types.StringConstants stringValues = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstantsOrBuilder> + getStringValuesFieldBuilder() { + if (stringValuesBuilder_ == null) { + stringValuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstantsOrBuilder>( + getStringValues(), + getParentForChildren(), + isClean()); + stringValues_ = null; + } + return stringValuesBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants binaryValues_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstantsOrBuilder> binaryValuesBuilder_; + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + * @return Whether the 
binaryValues field is set. + */ + public boolean hasBinaryValues() { + return ((bitField0_ & 0x00000010) != 0); + } + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + * @return The binaryValues. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants getBinaryValues() { + if (binaryValuesBuilder_ == null) { + return binaryValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.getDefaultInstance() : binaryValues_; + } else { + return binaryValuesBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + */ + public Builder setBinaryValues(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants value) { + if (binaryValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + binaryValues_ = value; + } else { + binaryValuesBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + */ + public Builder setBinaryValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.Builder builderForValue) { + if (binaryValuesBuilder_ == null) { + binaryValues_ = builderForValue.build(); + } else { + binaryValuesBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + */ + public Builder mergeBinaryValues(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants value) { + if (binaryValuesBuilder_ == null) { + if (((bitField0_ & 0x00000010) != 0) && + binaryValues_ != null && + binaryValues_ != org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.getDefaultInstance()) { + getBinaryValuesBuilder().mergeFrom(value); + } else { + binaryValues_ = value; + } + } else { + binaryValuesBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + onChanged(); + return this; + } + /** + * optional 
.gandiva.types.BinaryConstants binaryValues = 5; + */ + public Builder clearBinaryValues() { + bitField0_ = (bitField0_ & ~0x00000010); + binaryValues_ = null; + if (binaryValuesBuilder_ != null) { + binaryValuesBuilder_.dispose(); + binaryValuesBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.Builder getBinaryValuesBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getBinaryValuesFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstantsOrBuilder getBinaryValuesOrBuilder() { + if (binaryValuesBuilder_ != null) { + return binaryValuesBuilder_.getMessageOrBuilder(); + } else { + return binaryValues_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.getDefaultInstance() : binaryValues_; + } + } + /** + * optional .gandiva.types.BinaryConstants binaryValues = 5; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstantsOrBuilder> + getBinaryValuesFieldBuilder() { + if (binaryValuesBuilder_ == null) { + binaryValuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstantsOrBuilder>( + getBinaryValues(), + getParentForChildren(), + isClean()); + binaryValues_ = null; + } + return binaryValuesBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants decimalValues_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants, 
org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstantsOrBuilder> decimalValuesBuilder_; + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + * @return Whether the decimalValues field is set. + */ + public boolean hasDecimalValues() { + return ((bitField0_ & 0x00000020) != 0); + } + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + * @return The decimalValues. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants getDecimalValues() { + if (decimalValuesBuilder_ == null) { + return decimalValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.getDefaultInstance() : decimalValues_; + } else { + return decimalValuesBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + */ + public Builder setDecimalValues(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants value) { + if (decimalValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + decimalValues_ = value; + } else { + decimalValuesBuilder_.setMessage(value); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + */ + public Builder setDecimalValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.Builder builderForValue) { + if (decimalValuesBuilder_ == null) { + decimalValues_ = builderForValue.build(); + } else { + decimalValuesBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + */ + public Builder mergeDecimalValues(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants value) { + if (decimalValuesBuilder_ == null) { + if (((bitField0_ & 0x00000020) != 0) && + decimalValues_ != null && + decimalValues_ != 
org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.getDefaultInstance()) { + getDecimalValuesBuilder().mergeFrom(value); + } else { + decimalValues_ = value; + } + } else { + decimalValuesBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + */ + public Builder clearDecimalValues() { + bitField0_ = (bitField0_ & ~0x00000020); + decimalValues_ = null; + if (decimalValuesBuilder_ != null) { + decimalValuesBuilder_.dispose(); + decimalValuesBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.Builder getDecimalValuesBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getDecimalValuesFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstantsOrBuilder getDecimalValuesOrBuilder() { + if (decimalValuesBuilder_ != null) { + return decimalValuesBuilder_.getMessageOrBuilder(); + } else { + return decimalValues_ == null ? 
+ org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.getDefaultInstance() : decimalValues_; + } + } + /** + * optional .gandiva.types.DecimalConstants decimalValues = 6; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstantsOrBuilder> + getDecimalValuesFieldBuilder() { + if (decimalValuesBuilder_ == null) { + decimalValuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstantsOrBuilder>( + getDecimalValues(), + getParentForChildren(), + isClean()); + decimalValues_ = null; + } + return decimalValuesBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants floatValues_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstantsOrBuilder> floatValuesBuilder_; + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + * @return Whether the floatValues field is set. + */ + public boolean hasFloatValues() { + return ((bitField0_ & 0x00000040) != 0); + } + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + * @return The floatValues. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants getFloatValues() { + if (floatValuesBuilder_ == null) { + return floatValues_ == null ? 
org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.getDefaultInstance() : floatValues_; + } else { + return floatValuesBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + */ + public Builder setFloatValues(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants value) { + if (floatValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + floatValues_ = value; + } else { + floatValuesBuilder_.setMessage(value); + } + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + */ + public Builder setFloatValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.Builder builderForValue) { + if (floatValuesBuilder_ == null) { + floatValues_ = builderForValue.build(); + } else { + floatValuesBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + */ + public Builder mergeFloatValues(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants value) { + if (floatValuesBuilder_ == null) { + if (((bitField0_ & 0x00000040) != 0) && + floatValues_ != null && + floatValues_ != org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.getDefaultInstance()) { + getFloatValuesBuilder().mergeFrom(value); + } else { + floatValues_ = value; + } + } else { + floatValuesBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000040; + onChanged(); + return this; + } + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + */ + public Builder clearFloatValues() { + bitField0_ = (bitField0_ & ~0x00000040); + floatValues_ = null; + if (floatValuesBuilder_ != null) { + floatValuesBuilder_.dispose(); + floatValuesBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + */ + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.Builder getFloatValuesBuilder() { + bitField0_ |= 0x00000040; + onChanged(); + return getFloatValuesFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstantsOrBuilder getFloatValuesOrBuilder() { + if (floatValuesBuilder_ != null) { + return floatValuesBuilder_.getMessageOrBuilder(); + } else { + return floatValues_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.getDefaultInstance() : floatValues_; + } + } + /** + * optional .gandiva.types.FloatConstants floatValues = 7; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstantsOrBuilder> + getFloatValuesFieldBuilder() { + if (floatValuesBuilder_ == null) { + floatValuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstantsOrBuilder>( + getFloatValues(), + getParentForChildren(), + isClean()); + floatValues_ = null; + } + return floatValuesBuilder_; + } + + private org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants doubleValues_; + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstantsOrBuilder> doubleValuesBuilder_; + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + * @return Whether the doubleValues field is set. 
+ */ + public boolean hasDoubleValues() { + return ((bitField0_ & 0x00000080) != 0); + } + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + * @return The doubleValues. + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants getDoubleValues() { + if (doubleValuesBuilder_ == null) { + return doubleValues_ == null ? org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.getDefaultInstance() : doubleValues_; + } else { + return doubleValuesBuilder_.getMessage(); + } + } + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + */ + public Builder setDoubleValues(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants value) { + if (doubleValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + doubleValues_ = value; + } else { + doubleValuesBuilder_.setMessage(value); + } + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + */ + public Builder setDoubleValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.Builder builderForValue) { + if (doubleValuesBuilder_ == null) { + doubleValues_ = builderForValue.build(); + } else { + doubleValuesBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + */ + public Builder mergeDoubleValues(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants value) { + if (doubleValuesBuilder_ == null) { + if (((bitField0_ & 0x00000080) != 0) && + doubleValues_ != null && + doubleValues_ != org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.getDefaultInstance()) { + getDoubleValuesBuilder().mergeFrom(value); + } else { + doubleValues_ = value; + } + } else { + doubleValuesBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000080; + onChanged(); + return this; + } + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + */ + 
public Builder clearDoubleValues() { + bitField0_ = (bitField0_ & ~0x00000080); + doubleValues_ = null; + if (doubleValuesBuilder_ != null) { + doubleValuesBuilder_.dispose(); + doubleValuesBuilder_ = null; + } + onChanged(); + return this; + } + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.Builder getDoubleValuesBuilder() { + bitField0_ |= 0x00000080; + onChanged(); + return getDoubleValuesFieldBuilder().getBuilder(); + } + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstantsOrBuilder getDoubleValuesOrBuilder() { + if (doubleValuesBuilder_ != null) { + return doubleValuesBuilder_.getMessageOrBuilder(); + } else { + return doubleValues_ == null ? + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.getDefaultInstance() : doubleValues_; + } + } + /** + * optional .gandiva.types.DoubleConstants doubleValues = 8; + */ + private com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstantsOrBuilder> + getDoubleValuesFieldBuilder() { + if (doubleValuesBuilder_ == null) { + doubleValuesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstantsOrBuilder>( + getDoubleValues(), + getParentForChildren(), + isClean()); + doubleValues_ = null; + } + return doubleValuesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final 
com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.InNode) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.InNode) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.InNode DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.InNode(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.InNode getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public InNode parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.InNode getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface IntConstantsOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.IntConstants) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.IntNode intValues 
= 1; + */ + java.util.List + getIntValuesList(); + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode getIntValues(int index); + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + int getIntValuesCount(); + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + java.util.List + getIntValuesOrBuilderList(); + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder getIntValuesOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.IntConstants} + */ + public static final class IntConstants extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.IntConstants) + IntConstantsOrBuilder { + private static final long serialVersionUID = 0L; + // Use IntConstants.newBuilder() to construct. + private IntConstants(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private IntConstants() { + intValues_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new IntConstants(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IntConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IntConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.Builder.class); + } + + public static final int INTVALUES_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List 
intValues_; + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + @java.lang.Override + public java.util.List getIntValuesList() { + return intValues_; + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + @java.lang.Override + public java.util.List + getIntValuesOrBuilderList() { + return intValues_; + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + @java.lang.Override + public int getIntValuesCount() { + return intValues_.size(); + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode getIntValues(int index) { + return intValues_.get(index); + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder getIntValuesOrBuilder( + int index) { + return intValues_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < intValues_.size(); i++) { + output.writeMessage(1, intValues_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < intValues_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, intValues_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants other = (org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants) obj; + + if (!getIntValuesList() + .equals(other.getIntValuesList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getIntValuesCount() > 0) { + hash = (37 * hash) + INTVALUES_FIELD_NUMBER; + hash = (53 * hash) + getIntValuesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, 
extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.IntConstants} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.IntConstants) + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstantsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IntConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IntConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (intValuesBuilder_ == null) { + intValues_ = 
java.util.Collections.emptyList(); + } else { + intValues_ = null; + intValuesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_IntConstants_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants result = new org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants result) { + if (intValuesBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + intValues_ = java.util.Collections.unmodifiableList(intValues_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.intValues_ = intValues_; + } else { + result.intValues_ = intValuesBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants) { + return 
mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants.getDefaultInstance()) return this; + if (intValuesBuilder_ == null) { + if (!other.intValues_.isEmpty()) { + if (intValues_.isEmpty()) { + intValues_ = other.intValues_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureIntValuesIsMutable(); + intValues_.addAll(other.intValues_); + } + onChanged(); + } + } else { + if (!other.intValues_.isEmpty()) { + if (intValuesBuilder_.isEmpty()) { + intValuesBuilder_.dispose(); + intValuesBuilder_ = null; + intValues_ = other.intValues_; + bitField0_ = (bitField0_ & ~0x00000001); + intValuesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getIntValuesFieldBuilder() : null; + } else { + intValuesBuilder_.addAllMessages(other.intValues_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.parser(), + extensionRegistry); + if (intValuesBuilder_ == null) { + ensureIntValuesIsMutable(); + intValues_.add(m); + } else { + intValuesBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, 
extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.util.List intValues_ = + java.util.Collections.emptyList(); + private void ensureIntValuesIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + intValues_ = new java.util.ArrayList(intValues_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder> intValuesBuilder_; + + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public java.util.List getIntValuesList() { + if (intValuesBuilder_ == null) { + return java.util.Collections.unmodifiableList(intValues_); + } else { + return intValuesBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public int getIntValuesCount() { + if (intValuesBuilder_ == null) { + return intValues_.size(); + } else { + return intValuesBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode getIntValues(int index) { + if (intValuesBuilder_ == null) { + return intValues_.get(index); + } else { + return intValuesBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public Builder setIntValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode value) { + if (intValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureIntValuesIsMutable(); + intValues_.set(index, value); + onChanged(); + } else { + intValuesBuilder_.setMessage(index, value); + } + return 
this; + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public Builder setIntValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder builderForValue) { + if (intValuesBuilder_ == null) { + ensureIntValuesIsMutable(); + intValues_.set(index, builderForValue.build()); + onChanged(); + } else { + intValuesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public Builder addIntValues(org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode value) { + if (intValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureIntValuesIsMutable(); + intValues_.add(value); + onChanged(); + } else { + intValuesBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public Builder addIntValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode value) { + if (intValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureIntValuesIsMutable(); + intValues_.add(index, value); + onChanged(); + } else { + intValuesBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public Builder addIntValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder builderForValue) { + if (intValuesBuilder_ == null) { + ensureIntValuesIsMutable(); + intValues_.add(builderForValue.build()); + onChanged(); + } else { + intValuesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public Builder addIntValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder builderForValue) { + if (intValuesBuilder_ == null) { + ensureIntValuesIsMutable(); + intValues_.add(index, builderForValue.build()); + onChanged(); + } else { + intValuesBuilder_.addMessage(index, 
builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public Builder addAllIntValues( + java.lang.Iterable values) { + if (intValuesBuilder_ == null) { + ensureIntValuesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, intValues_); + onChanged(); + } else { + intValuesBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public Builder clearIntValues() { + if (intValuesBuilder_ == null) { + intValues_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + intValuesBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public Builder removeIntValues(int index) { + if (intValuesBuilder_ == null) { + ensureIntValuesIsMutable(); + intValues_.remove(index); + onChanged(); + } else { + intValuesBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder getIntValuesBuilder( + int index) { + return getIntValuesFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder getIntValuesOrBuilder( + int index) { + if (intValuesBuilder_ == null) { + return intValues_.get(index); } else { + return intValuesBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public java.util.List + getIntValuesOrBuilderList() { + if (intValuesBuilder_ != null) { + return intValuesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(intValues_); + } + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder addIntValuesBuilder() { + return 
getIntValuesFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder addIntValuesBuilder( + int index) { + return getIntValuesFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.IntNode intValues = 1; + */ + public java.util.List + getIntValuesBuilderList() { + return getIntValuesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder> + getIntValuesFieldBuilder() { + if (intValuesBuilder_ == null) { + intValuesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.IntNodeOrBuilder>( + intValues_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + intValues_ = null; + } + return intValuesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.IntConstants) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.IntConstants) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants(); + } + + public static 
org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public IntConstants parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.IntConstants getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface LongConstantsOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.LongConstants) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + java.util.List + getLongValuesList(); + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode getLongValues(int index); + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + int getLongValuesCount(); + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + java.util.List + getLongValuesOrBuilderList(); + /** + * repeated 
.gandiva.types.LongNode longValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder getLongValuesOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.LongConstants} + */ + public static final class LongConstants extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.LongConstants) + LongConstantsOrBuilder { + private static final long serialVersionUID = 0L; + // Use LongConstants.newBuilder() to construct. + private LongConstants(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private LongConstants() { + longValues_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new LongConstants(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_LongConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_LongConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.Builder.class); + } + + public static final int LONGVALUES_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List longValues_; + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + @java.lang.Override + public java.util.List getLongValuesList() { + return longValues_; + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + @java.lang.Override + public java.util.List + getLongValuesOrBuilderList() { + return longValues_; + } + /** + * repeated .gandiva.types.LongNode 
longValues = 1; + */ + @java.lang.Override + public int getLongValuesCount() { + return longValues_.size(); + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode getLongValues(int index) { + return longValues_.get(index); + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder getLongValuesOrBuilder( + int index) { + return longValues_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < longValues_.size(); i++) { + output.writeMessage(1, longValues_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < longValues_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, longValues_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants other = (org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants) obj; + + if (!getLongValuesList() + .equals(other.getLongValuesList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return 
true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getLongValuesCount() > 0) { + hash = (37 * hash) + LONGVALUES_FIELD_NUMBER; + hash = (53 * hash) + getLongValuesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static 
org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants prototype) { + return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.LongConstants} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.LongConstants) + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstantsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_LongConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_LongConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (longValuesBuilder_ == null) { + longValues_ = java.util.Collections.emptyList(); + } else { + longValues_ = null; + longValuesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_LongConstants_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants result = new org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants result) { + if (longValuesBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + longValues_ = java.util.Collections.unmodifiableList(longValues_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.longValues_ = longValues_; + } else { + result.longValues_ = longValuesBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants other) { + if (other == 
org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants.getDefaultInstance()) return this; + if (longValuesBuilder_ == null) { + if (!other.longValues_.isEmpty()) { + if (longValues_.isEmpty()) { + longValues_ = other.longValues_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureLongValuesIsMutable(); + longValues_.addAll(other.longValues_); + } + onChanged(); + } + } else { + if (!other.longValues_.isEmpty()) { + if (longValuesBuilder_.isEmpty()) { + longValuesBuilder_.dispose(); + longValuesBuilder_ = null; + longValues_ = other.longValues_; + bitField0_ = (bitField0_ & ~0x00000001); + longValuesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getLongValuesFieldBuilder() : null; + } else { + longValuesBuilder_.addAllMessages(other.longValues_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.parser(), + extensionRegistry); + if (longValuesBuilder_ == null) { + ensureLongValuesIsMutable(); + longValues_.add(m); + } else { + longValuesBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw 
e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.util.List longValues_ = + java.util.Collections.emptyList(); + private void ensureLongValuesIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + longValues_ = new java.util.ArrayList(longValues_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder> longValuesBuilder_; + + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public java.util.List getLongValuesList() { + if (longValuesBuilder_ == null) { + return java.util.Collections.unmodifiableList(longValues_); + } else { + return longValuesBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public int getLongValuesCount() { + if (longValuesBuilder_ == null) { + return longValues_.size(); + } else { + return longValuesBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode getLongValues(int index) { + if (longValuesBuilder_ == null) { + return longValues_.get(index); + } else { + return longValuesBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public Builder setLongValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode value) { + if (longValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLongValuesIsMutable(); + longValues_.set(index, value); + onChanged(); + } else { + longValuesBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public Builder setLongValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder 
builderForValue) { + if (longValuesBuilder_ == null) { + ensureLongValuesIsMutable(); + longValues_.set(index, builderForValue.build()); + onChanged(); + } else { + longValuesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public Builder addLongValues(org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode value) { + if (longValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLongValuesIsMutable(); + longValues_.add(value); + onChanged(); + } else { + longValuesBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public Builder addLongValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode value) { + if (longValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureLongValuesIsMutable(); + longValues_.add(index, value); + onChanged(); + } else { + longValuesBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public Builder addLongValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder builderForValue) { + if (longValuesBuilder_ == null) { + ensureLongValuesIsMutable(); + longValues_.add(builderForValue.build()); + onChanged(); + } else { + longValuesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public Builder addLongValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder builderForValue) { + if (longValuesBuilder_ == null) { + ensureLongValuesIsMutable(); + longValues_.add(index, builderForValue.build()); + onChanged(); + } else { + longValuesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public Builder addAllLongValues( + 
java.lang.Iterable values) { + if (longValuesBuilder_ == null) { + ensureLongValuesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, longValues_); + onChanged(); + } else { + longValuesBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public Builder clearLongValues() { + if (longValuesBuilder_ == null) { + longValues_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + longValuesBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public Builder removeLongValues(int index) { + if (longValuesBuilder_ == null) { + ensureLongValuesIsMutable(); + longValues_.remove(index); + onChanged(); + } else { + longValuesBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder getLongValuesBuilder( + int index) { + return getLongValuesFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder getLongValuesOrBuilder( + int index) { + if (longValuesBuilder_ == null) { + return longValues_.get(index); } else { + return longValuesBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public java.util.List + getLongValuesOrBuilderList() { + if (longValuesBuilder_ != null) { + return longValuesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(longValues_); + } + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder addLongValuesBuilder() { + return getLongValuesFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.getDefaultInstance()); + } + 
/** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder addLongValuesBuilder( + int index) { + return getLongValuesFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.LongNode longValues = 1; + */ + public java.util.List + getLongValuesBuilderList() { + return getLongValuesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder> + getLongValuesFieldBuilder() { + if (longValuesBuilder_ == null) { + longValuesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.LongNodeOrBuilder>( + longValues_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + longValues_ = null; + } + return longValuesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.LongConstants) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.LongConstants) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public LongConstants parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.LongConstants getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface DecimalConstantsOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.DecimalConstants) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + java.util.List + getDecimalValuesList(); + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode getDecimalValues(int index); + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + int getDecimalValuesCount(); + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + java.util.List + getDecimalValuesOrBuilderList(); + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + 
*/ + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder getDecimalValuesOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.DecimalConstants} + */ + public static final class DecimalConstants extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.DecimalConstants) + DecimalConstantsOrBuilder { + private static final long serialVersionUID = 0L; + // Use DecimalConstants.newBuilder() to construct. + private DecimalConstants(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DecimalConstants() { + decimalValues_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new DecimalConstants(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DecimalConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DecimalConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.Builder.class); + } + + public static final int DECIMALVALUES_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List decimalValues_; + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + @java.lang.Override + public java.util.List getDecimalValuesList() { + return decimalValues_; + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + @java.lang.Override + public java.util.List + getDecimalValuesOrBuilderList() { + return decimalValues_; + } + /** + * repeated 
.gandiva.types.DecimalNode decimalValues = 1; + */ + @java.lang.Override + public int getDecimalValuesCount() { + return decimalValues_.size(); + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode getDecimalValues(int index) { + return decimalValues_.get(index); + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder getDecimalValuesOrBuilder( + int index) { + return decimalValues_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < decimalValues_.size(); i++) { + output.writeMessage(1, decimalValues_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < decimalValues_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, decimalValues_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants other = (org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants) obj; + + if (!getDecimalValuesList() + .equals(other.getDecimalValuesList())) return 
false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getDecimalValuesCount() > 0) { + hash = (37 * hash) + DECIMALVALUES_FIELD_NUMBER; + hash = (53 * hash) + getDecimalValuesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder 
newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.DecimalConstants} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.DecimalConstants) + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstantsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DecimalConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DecimalConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (decimalValuesBuilder_ == null) { + decimalValues_ = java.util.Collections.emptyList(); + } else { + decimalValues_ = null; + decimalValuesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + 
@java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DecimalConstants_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants result = new org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants result) { + if (decimalValuesBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + decimalValues_ = java.util.Collections.unmodifiableList(decimalValues_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.decimalValues_ = decimalValues_; + } else { + result.decimalValues_ = decimalValuesBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants)other); + } else { + super.mergeFrom(other); + return this; + } + } + + 
public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants.getDefaultInstance()) return this; + if (decimalValuesBuilder_ == null) { + if (!other.decimalValues_.isEmpty()) { + if (decimalValues_.isEmpty()) { + decimalValues_ = other.decimalValues_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureDecimalValuesIsMutable(); + decimalValues_.addAll(other.decimalValues_); + } + onChanged(); + } + } else { + if (!other.decimalValues_.isEmpty()) { + if (decimalValuesBuilder_.isEmpty()) { + decimalValuesBuilder_.dispose(); + decimalValuesBuilder_ = null; + decimalValues_ = other.decimalValues_; + bitField0_ = (bitField0_ & ~0x00000001); + decimalValuesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getDecimalValuesFieldBuilder() : null; + } else { + decimalValuesBuilder_.addAllMessages(other.decimalValues_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.parser(), + extensionRegistry); + if (decimalValuesBuilder_ == null) { + ensureDecimalValuesIsMutable(); + decimalValues_.add(m); + } else { + decimalValuesBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = 
true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.util.List decimalValues_ = + java.util.Collections.emptyList(); + private void ensureDecimalValuesIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + decimalValues_ = new java.util.ArrayList(decimalValues_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder> decimalValuesBuilder_; + + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public java.util.List getDecimalValuesList() { + if (decimalValuesBuilder_ == null) { + return java.util.Collections.unmodifiableList(decimalValues_); + } else { + return decimalValuesBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public int getDecimalValuesCount() { + if (decimalValuesBuilder_ == null) { + return decimalValues_.size(); + } else { + return decimalValuesBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode getDecimalValues(int index) { + if (decimalValuesBuilder_ == null) { + return decimalValues_.get(index); + } else { + return decimalValuesBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public Builder setDecimalValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode value) { + if (decimalValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDecimalValuesIsMutable(); + 
decimalValues_.set(index, value); + onChanged(); + } else { + decimalValuesBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public Builder setDecimalValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder builderForValue) { + if (decimalValuesBuilder_ == null) { + ensureDecimalValuesIsMutable(); + decimalValues_.set(index, builderForValue.build()); + onChanged(); + } else { + decimalValuesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public Builder addDecimalValues(org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode value) { + if (decimalValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDecimalValuesIsMutable(); + decimalValues_.add(value); + onChanged(); + } else { + decimalValuesBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public Builder addDecimalValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode value) { + if (decimalValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDecimalValuesIsMutable(); + decimalValues_.add(index, value); + onChanged(); + } else { + decimalValuesBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public Builder addDecimalValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder builderForValue) { + if (decimalValuesBuilder_ == null) { + ensureDecimalValuesIsMutable(); + decimalValues_.add(builderForValue.build()); + onChanged(); + } else { + decimalValuesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public Builder addDecimalValues( + int index, 
org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder builderForValue) { + if (decimalValuesBuilder_ == null) { + ensureDecimalValuesIsMutable(); + decimalValues_.add(index, builderForValue.build()); + onChanged(); + } else { + decimalValuesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public Builder addAllDecimalValues( + java.lang.Iterable values) { + if (decimalValuesBuilder_ == null) { + ensureDecimalValuesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, decimalValues_); + onChanged(); + } else { + decimalValuesBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public Builder clearDecimalValues() { + if (decimalValuesBuilder_ == null) { + decimalValues_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + decimalValuesBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public Builder removeDecimalValues(int index) { + if (decimalValuesBuilder_ == null) { + ensureDecimalValuesIsMutable(); + decimalValues_.remove(index); + onChanged(); + } else { + decimalValuesBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder getDecimalValuesBuilder( + int index) { + return getDecimalValuesFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder getDecimalValuesOrBuilder( + int index) { + if (decimalValuesBuilder_ == null) { + return decimalValues_.get(index); } else { + return decimalValuesBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.DecimalNode decimalValues 
= 1; + */ + public java.util.List + getDecimalValuesOrBuilderList() { + if (decimalValuesBuilder_ != null) { + return decimalValuesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(decimalValues_); + } + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder addDecimalValuesBuilder() { + return getDecimalValuesFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder addDecimalValuesBuilder( + int index) { + return getDecimalValuesFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.DecimalNode decimalValues = 1; + */ + public java.util.List + getDecimalValuesBuilderList() { + return getDecimalValuesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder> + getDecimalValuesFieldBuilder() { + if (decimalValuesBuilder_ == null) { + decimalValuesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalNodeOrBuilder>( + decimalValues_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + decimalValues_ = null; + } + return decimalValuesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + 
public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.DecimalConstants) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.DecimalConstants) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DecimalConstants parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DecimalConstants getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface FloatConstantsOrBuilder extends + // 
@@protoc_insertion_point(interface_extends:gandiva.types.FloatConstants) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + java.util.List + getFloatValuesList(); + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode getFloatValues(int index); + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + int getFloatValuesCount(); + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + java.util.List + getFloatValuesOrBuilderList(); + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder getFloatValuesOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.FloatConstants} + */ + public static final class FloatConstants extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.FloatConstants) + FloatConstantsOrBuilder { + private static final long serialVersionUID = 0L; + // Use FloatConstants.newBuilder() to construct. 
+ private FloatConstants(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private FloatConstants() { + floatValues_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new FloatConstants(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FloatConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FloatConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.Builder.class); + } + + public static final int FLOATVALUES_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List floatValues_; + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + @java.lang.Override + public java.util.List getFloatValuesList() { + return floatValues_; + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + @java.lang.Override + public java.util.List + getFloatValuesOrBuilderList() { + return floatValues_; + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + @java.lang.Override + public int getFloatValuesCount() { + return floatValues_.size(); + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode getFloatValues(int index) { + return floatValues_.get(index); + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder getFloatValuesOrBuilder( 
+ int index) { + return floatValues_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < floatValues_.size(); i++) { + output.writeMessage(1, floatValues_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < floatValues_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, floatValues_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants other = (org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants) obj; + + if (!getFloatValuesList() + .equals(other.getFloatValuesList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getFloatValuesCount() > 0) { + hash = (37 * hash) + FLOATVALUES_FIELD_NUMBER; + hash = (53 * hash) + getFloatValuesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static 
org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.FloatConstants} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.FloatConstants) + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstantsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FloatConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FloatConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (floatValuesBuilder_ == null) { + floatValues_ = java.util.Collections.emptyList(); + } else { + floatValues_ = null; + floatValuesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_FloatConstants_descriptor; + } + + @java.lang.Override + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants result = new org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants result) { + if (floatValuesBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + floatValues_ = java.util.Collections.unmodifiableList(floatValues_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.floatValues_ = floatValues_; + } else { + result.floatValues_ = floatValuesBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants.getDefaultInstance()) return this; + if (floatValuesBuilder_ == null) { + if (!other.floatValues_.isEmpty()) { + if 
(floatValues_.isEmpty()) { + floatValues_ = other.floatValues_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureFloatValuesIsMutable(); + floatValues_.addAll(other.floatValues_); + } + onChanged(); + } + } else { + if (!other.floatValues_.isEmpty()) { + if (floatValuesBuilder_.isEmpty()) { + floatValuesBuilder_.dispose(); + floatValuesBuilder_ = null; + floatValues_ = other.floatValues_; + bitField0_ = (bitField0_ & ~0x00000001); + floatValuesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getFloatValuesFieldBuilder() : null; + } else { + floatValuesBuilder_.addAllMessages(other.floatValues_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.parser(), + extensionRegistry); + if (floatValuesBuilder_ == null) { + ensureFloatValuesIsMutable(); + floatValues_.add(m); + } else { + floatValuesBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.util.List floatValues_ = + 
java.util.Collections.emptyList(); + private void ensureFloatValuesIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + floatValues_ = new java.util.ArrayList(floatValues_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder> floatValuesBuilder_; + + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public java.util.List getFloatValuesList() { + if (floatValuesBuilder_ == null) { + return java.util.Collections.unmodifiableList(floatValues_); + } else { + return floatValuesBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public int getFloatValuesCount() { + if (floatValuesBuilder_ == null) { + return floatValues_.size(); + } else { + return floatValuesBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode getFloatValues(int index) { + if (floatValuesBuilder_ == null) { + return floatValues_.get(index); + } else { + return floatValuesBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public Builder setFloatValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode value) { + if (floatValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFloatValuesIsMutable(); + floatValues_.set(index, value); + onChanged(); + } else { + floatValuesBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public Builder setFloatValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder builderForValue) { + if (floatValuesBuilder_ == null) { + ensureFloatValuesIsMutable(); + floatValues_.set(index, 
builderForValue.build()); + onChanged(); + } else { + floatValuesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public Builder addFloatValues(org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode value) { + if (floatValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFloatValuesIsMutable(); + floatValues_.add(value); + onChanged(); + } else { + floatValuesBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public Builder addFloatValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode value) { + if (floatValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFloatValuesIsMutable(); + floatValues_.add(index, value); + onChanged(); + } else { + floatValuesBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public Builder addFloatValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder builderForValue) { + if (floatValuesBuilder_ == null) { + ensureFloatValuesIsMutable(); + floatValues_.add(builderForValue.build()); + onChanged(); + } else { + floatValuesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public Builder addFloatValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder builderForValue) { + if (floatValuesBuilder_ == null) { + ensureFloatValuesIsMutable(); + floatValues_.add(index, builderForValue.build()); + onChanged(); + } else { + floatValuesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public Builder addAllFloatValues( + java.lang.Iterable values) { + if (floatValuesBuilder_ == null) { + 
ensureFloatValuesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, floatValues_); + onChanged(); + } else { + floatValuesBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public Builder clearFloatValues() { + if (floatValuesBuilder_ == null) { + floatValues_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + floatValuesBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public Builder removeFloatValues(int index) { + if (floatValuesBuilder_ == null) { + ensureFloatValuesIsMutable(); + floatValues_.remove(index); + onChanged(); + } else { + floatValuesBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder getFloatValuesBuilder( + int index) { + return getFloatValuesFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder getFloatValuesOrBuilder( + int index) { + if (floatValuesBuilder_ == null) { + return floatValues_.get(index); } else { + return floatValuesBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public java.util.List + getFloatValuesOrBuilderList() { + if (floatValuesBuilder_ != null) { + return floatValuesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(floatValues_); + } + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder addFloatValuesBuilder() { + return getFloatValuesFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.getDefaultInstance()); + } + /** + * repeated 
.gandiva.types.FloatNode floatValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder addFloatValuesBuilder( + int index) { + return getFloatValuesFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.FloatNode floatValues = 1; + */ + public java.util.List + getFloatValuesBuilderList() { + return getFloatValuesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder> + getFloatValuesFieldBuilder() { + if (floatValuesBuilder_ == null) { + floatValuesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.FloatNodeOrBuilder>( + floatValues_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + floatValues_ = null; + } + return floatValuesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.FloatConstants) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.FloatConstants) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants getDefaultInstance() { + return 
DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public FloatConstants parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.FloatConstants getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface DoubleConstantsOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.DoubleConstants) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + java.util.List + getDoubleValuesList(); + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode getDoubleValues(int index); + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + int getDoubleValuesCount(); + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + java.util.List + getDoubleValuesOrBuilderList(); + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + 
org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder getDoubleValuesOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.DoubleConstants} + */ + public static final class DoubleConstants extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.DoubleConstants) + DoubleConstantsOrBuilder { + private static final long serialVersionUID = 0L; + // Use DoubleConstants.newBuilder() to construct. + private DoubleConstants(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DoubleConstants() { + doubleValues_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new DoubleConstants(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DoubleConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DoubleConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.Builder.class); + } + + public static final int DOUBLEVALUES_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List doubleValues_; + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + @java.lang.Override + public java.util.List getDoubleValuesList() { + return doubleValues_; + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + @java.lang.Override + public java.util.List + getDoubleValuesOrBuilderList() { + return doubleValues_; + } + /** + * repeated .gandiva.types.DoubleNode 
doubleValues = 1; + */ + @java.lang.Override + public int getDoubleValuesCount() { + return doubleValues_.size(); + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode getDoubleValues(int index) { + return doubleValues_.get(index); + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder getDoubleValuesOrBuilder( + int index) { + return doubleValues_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < doubleValues_.size(); i++) { + output.writeMessage(1, doubleValues_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < doubleValues_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, doubleValues_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants other = (org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants) obj; + + if (!getDoubleValuesList() + .equals(other.getDoubleValuesList())) return false; + if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getDoubleValuesCount() > 0) { + hash = (37 * hash) + DOUBLEVALUES_FIELD_NUMBER; + hash = (53 * hash) + getDoubleValuesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder 
newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.DoubleConstants} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.DoubleConstants) + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstantsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DoubleConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DoubleConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (doubleValuesBuilder_ == null) { + doubleValues_ = java.util.Collections.emptyList(); + } else { + doubleValues_ = null; + doubleValuesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public 
com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_DoubleConstants_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants result = new org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants result) { + if (doubleValuesBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + doubleValues_ = java.util.Collections.unmodifiableList(doubleValues_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.doubleValues_ = doubleValues_; + } else { + result.doubleValues_ = doubleValuesBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder 
mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants.getDefaultInstance()) return this; + if (doubleValuesBuilder_ == null) { + if (!other.doubleValues_.isEmpty()) { + if (doubleValues_.isEmpty()) { + doubleValues_ = other.doubleValues_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureDoubleValuesIsMutable(); + doubleValues_.addAll(other.doubleValues_); + } + onChanged(); + } + } else { + if (!other.doubleValues_.isEmpty()) { + if (doubleValuesBuilder_.isEmpty()) { + doubleValuesBuilder_.dispose(); + doubleValuesBuilder_ = null; + doubleValues_ = other.doubleValues_; + bitField0_ = (bitField0_ & ~0x00000001); + doubleValuesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getDoubleValuesFieldBuilder() : null; + } else { + doubleValuesBuilder_.addAllMessages(other.doubleValues_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.parser(), + extensionRegistry); + if (doubleValuesBuilder_ == null) { + ensureDoubleValuesIsMutable(); + doubleValues_.add(m); + } else { + doubleValuesBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + 
break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.util.List doubleValues_ = + java.util.Collections.emptyList(); + private void ensureDoubleValuesIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + doubleValues_ = new java.util.ArrayList(doubleValues_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder> doubleValuesBuilder_; + + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public java.util.List getDoubleValuesList() { + if (doubleValuesBuilder_ == null) { + return java.util.Collections.unmodifiableList(doubleValues_); + } else { + return doubleValuesBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public int getDoubleValuesCount() { + if (doubleValuesBuilder_ == null) { + return doubleValues_.size(); + } else { + return doubleValuesBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode getDoubleValues(int index) { + if (doubleValuesBuilder_ == null) { + return doubleValues_.get(index); + } else { + return doubleValuesBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public Builder setDoubleValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode value) { + if (doubleValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDoubleValuesIsMutable(); + doubleValues_.set(index, value); + onChanged(); + } else { + 
doubleValuesBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public Builder setDoubleValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder builderForValue) { + if (doubleValuesBuilder_ == null) { + ensureDoubleValuesIsMutable(); + doubleValues_.set(index, builderForValue.build()); + onChanged(); + } else { + doubleValuesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public Builder addDoubleValues(org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode value) { + if (doubleValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDoubleValuesIsMutable(); + doubleValues_.add(value); + onChanged(); + } else { + doubleValuesBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public Builder addDoubleValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode value) { + if (doubleValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureDoubleValuesIsMutable(); + doubleValues_.add(index, value); + onChanged(); + } else { + doubleValuesBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public Builder addDoubleValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder builderForValue) { + if (doubleValuesBuilder_ == null) { + ensureDoubleValuesIsMutable(); + doubleValues_.add(builderForValue.build()); + onChanged(); + } else { + doubleValuesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public Builder addDoubleValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder builderForValue) { + if (doubleValuesBuilder_ == 
null) { + ensureDoubleValuesIsMutable(); + doubleValues_.add(index, builderForValue.build()); + onChanged(); + } else { + doubleValuesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public Builder addAllDoubleValues( + java.lang.Iterable values) { + if (doubleValuesBuilder_ == null) { + ensureDoubleValuesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, doubleValues_); + onChanged(); + } else { + doubleValuesBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public Builder clearDoubleValues() { + if (doubleValuesBuilder_ == null) { + doubleValues_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + doubleValuesBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public Builder removeDoubleValues(int index) { + if (doubleValuesBuilder_ == null) { + ensureDoubleValuesIsMutable(); + doubleValues_.remove(index); + onChanged(); + } else { + doubleValuesBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder getDoubleValuesBuilder( + int index) { + return getDoubleValuesFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder getDoubleValuesOrBuilder( + int index) { + if (doubleValuesBuilder_ == null) { + return doubleValues_.get(index); } else { + return doubleValuesBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public java.util.List + getDoubleValuesOrBuilderList() { + if (doubleValuesBuilder_ != null) { + return 
doubleValuesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(doubleValues_); + } + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder addDoubleValuesBuilder() { + return getDoubleValuesFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder addDoubleValuesBuilder( + int index) { + return getDoubleValuesFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.DoubleNode doubleValues = 1; + */ + public java.util.List + getDoubleValuesBuilderList() { + return getDoubleValuesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder> + getDoubleValuesFieldBuilder() { + if (doubleValuesBuilder_ == null) { + doubleValuesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleNodeOrBuilder>( + doubleValues_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + doubleValues_ = null; + } + return doubleValuesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return 
super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.DoubleConstants) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.DoubleConstants) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DoubleConstants parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.DoubleConstants getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface StringConstantsOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.StringConstants) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.StringNode 
stringValues = 1; + */ + java.util.List + getStringValuesList(); + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode getStringValues(int index); + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + int getStringValuesCount(); + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + java.util.List + getStringValuesOrBuilderList(); + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder getStringValuesOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.StringConstants} + */ + public static final class StringConstants extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.StringConstants) + StringConstantsOrBuilder { + private static final long serialVersionUID = 0L; + // Use StringConstants.newBuilder() to construct. + private StringConstants(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private StringConstants() { + stringValues_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new StringConstants(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_StringConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_StringConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.Builder.class); + } + + public static 
final int STRINGVALUES_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List stringValues_; + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + @java.lang.Override + public java.util.List getStringValuesList() { + return stringValues_; + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + @java.lang.Override + public java.util.List + getStringValuesOrBuilderList() { + return stringValues_; + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + @java.lang.Override + public int getStringValuesCount() { + return stringValues_.size(); + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode getStringValues(int index) { + return stringValues_.get(index); + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder getStringValuesOrBuilder( + int index) { + return stringValues_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < stringValues_.size(); i++) { + output.writeMessage(1, stringValues_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < stringValues_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, stringValues_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; 
+ } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants other = (org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants) obj; + + if (!getStringValuesList() + .equals(other.getStringValuesList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getStringValuesCount() > 0) { + hash = (37 * hash) + STRINGVALUES_FIELD_NUMBER; + hash = (53 * hash) + getStringValuesList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, 
extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.StringConstants} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.StringConstants) + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstantsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_StringConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_StringConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.newBuilder() + private Builder() { + + } + + 
private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (stringValuesBuilder_ == null) { + stringValues_ = java.util.Collections.emptyList(); + } else { + stringValues_ = null; + stringValuesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_StringConstants_descriptor; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants result = new org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants result) { + if (stringValuesBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + stringValues_ = java.util.Collections.unmodifiableList(stringValues_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.stringValues_ = stringValues_; + } else { + result.stringValues_ = stringValuesBuilder_.build(); + } + } + + private void 
buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants.getDefaultInstance()) return this; + if (stringValuesBuilder_ == null) { + if (!other.stringValues_.isEmpty()) { + if (stringValues_.isEmpty()) { + stringValues_ = other.stringValues_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureStringValuesIsMutable(); + stringValues_.addAll(other.stringValues_); + } + onChanged(); + } + } else { + if (!other.stringValues_.isEmpty()) { + if (stringValuesBuilder_.isEmpty()) { + stringValuesBuilder_.dispose(); + stringValuesBuilder_ = null; + stringValues_ = other.stringValues_; + bitField0_ = (bitField0_ & ~0x00000001); + stringValuesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getStringValuesFieldBuilder() : null; + } else { + stringValuesBuilder_.addAllMessages(other.stringValues_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.parser(), + extensionRegistry); + if (stringValuesBuilder_ == null) { + ensureStringValuesIsMutable(); + stringValues_.add(m); + } else { + stringValuesBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private java.util.List stringValues_ = + java.util.Collections.emptyList(); + private void ensureStringValuesIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + stringValues_ = new java.util.ArrayList(stringValues_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder> stringValuesBuilder_; + + /** + * repeated .gandiva.types.StringNode 
stringValues = 1; + */ + public java.util.List getStringValuesList() { + if (stringValuesBuilder_ == null) { + return java.util.Collections.unmodifiableList(stringValues_); + } else { + return stringValuesBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public int getStringValuesCount() { + if (stringValuesBuilder_ == null) { + return stringValues_.size(); + } else { + return stringValuesBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode getStringValues(int index) { + if (stringValuesBuilder_ == null) { + return stringValues_.get(index); + } else { + return stringValuesBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public Builder setStringValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode value) { + if (stringValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureStringValuesIsMutable(); + stringValues_.set(index, value); + onChanged(); + } else { + stringValuesBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public Builder setStringValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder builderForValue) { + if (stringValuesBuilder_ == null) { + ensureStringValuesIsMutable(); + stringValues_.set(index, builderForValue.build()); + onChanged(); + } else { + stringValuesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public Builder addStringValues(org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode value) { + if (stringValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureStringValuesIsMutable(); + stringValues_.add(value); + onChanged(); + } 
else { + stringValuesBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public Builder addStringValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode value) { + if (stringValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureStringValuesIsMutable(); + stringValues_.add(index, value); + onChanged(); + } else { + stringValuesBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public Builder addStringValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder builderForValue) { + if (stringValuesBuilder_ == null) { + ensureStringValuesIsMutable(); + stringValues_.add(builderForValue.build()); + onChanged(); + } else { + stringValuesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public Builder addStringValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder builderForValue) { + if (stringValuesBuilder_ == null) { + ensureStringValuesIsMutable(); + stringValues_.add(index, builderForValue.build()); + onChanged(); + } else { + stringValuesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public Builder addAllStringValues( + java.lang.Iterable values) { + if (stringValuesBuilder_ == null) { + ensureStringValuesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, stringValues_); + onChanged(); + } else { + stringValuesBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public Builder clearStringValues() { + if (stringValuesBuilder_ == null) { + stringValues_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + 
onChanged(); + } else { + stringValuesBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public Builder removeStringValues(int index) { + if (stringValuesBuilder_ == null) { + ensureStringValuesIsMutable(); + stringValues_.remove(index); + onChanged(); + } else { + stringValuesBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder getStringValuesBuilder( + int index) { + return getStringValuesFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder getStringValuesOrBuilder( + int index) { + if (stringValuesBuilder_ == null) { + return stringValues_.get(index); } else { + return stringValuesBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public java.util.List + getStringValuesOrBuilderList() { + if (stringValuesBuilder_ != null) { + return stringValuesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(stringValues_); + } + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder addStringValuesBuilder() { + return getStringValuesFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder addStringValuesBuilder( + int index) { + return getStringValuesFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.StringNode stringValues = 1; + */ + public java.util.List + getStringValuesBuilderList() { + 
return getStringValuesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder> + getStringValuesFieldBuilder() { + if (stringValuesBuilder_ == null) { + stringValuesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.StringNodeOrBuilder>( + stringValues_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + stringValues_ = null; + } + return stringValuesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.StringConstants) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.StringConstants) + private static final org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public StringConstants parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); 
+ try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.StringConstants getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface BinaryConstantsOrBuilder extends + // @@protoc_insertion_point(interface_extends:gandiva.types.BinaryConstants) + com.google.protobuf.MessageOrBuilder { + + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + java.util.List + getBinaryValuesList(); + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode getBinaryValues(int index); + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + int getBinaryValuesCount(); + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + java.util.List + getBinaryValuesOrBuilderList(); + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder getBinaryValuesOrBuilder( + int index); + } + /** + * Protobuf type {@code gandiva.types.BinaryConstants} + */ + public static final class BinaryConstants extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:gandiva.types.BinaryConstants) + BinaryConstantsOrBuilder { + private static 
final long serialVersionUID = 0L; + // Use BinaryConstants.newBuilder() to construct. + private BinaryConstants(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private BinaryConstants() { + binaryValues_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new BinaryConstants(); + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BinaryConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BinaryConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.Builder.class); + } + + public static final int BINARYVALUES_FIELD_NUMBER = 1; + @SuppressWarnings("serial") + private java.util.List binaryValues_; + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + @java.lang.Override + public java.util.List getBinaryValuesList() { + return binaryValues_; + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + @java.lang.Override + public java.util.List + getBinaryValuesOrBuilderList() { + return binaryValues_; + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + @java.lang.Override + public int getBinaryValuesCount() { + return binaryValues_.size(); + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode getBinaryValues(int index) { + return binaryValues_.get(index); + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + 
@java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder getBinaryValuesOrBuilder( + int index) { + return binaryValues_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < binaryValues_.size(); i++) { + output.writeMessage(1, binaryValues_.get(i)); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < binaryValues_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, binaryValues_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants)) { + return super.equals(obj); + } + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants other = (org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants) obj; + + if (!getBinaryValuesList() + .equals(other.getBinaryValuesList())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getBinaryValuesCount() > 0) { + hash = (37 * hash) + BINARYVALUES_FIELD_NUMBER; + hash = (53 * hash) + getBinaryValuesList().hashCode(); + } + hash = (29 * hash) + 
getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code gandiva.types.BinaryConstants} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:gandiva.types.BinaryConstants) + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstantsOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BinaryConstants_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BinaryConstants_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.class, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.Builder.class); + } + + // Construct using org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + if (binaryValuesBuilder_ == null) { + binaryValues_ = java.util.Collections.emptyList(); + } else { + binaryValues_ = null; + binaryValuesBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.internal_static_gandiva_types_BinaryConstants_descriptor; + } + + @java.lang.Override + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants getDefaultInstanceForType() { + return org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.getDefaultInstance(); + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants build() { + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants buildPartial() { + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants result = new org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants(this); + buildPartialRepeatedFields(result); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartialRepeatedFields(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants result) { + if (binaryValuesBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + binaryValues_ = java.util.Collections.unmodifiableList(binaryValues_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.binaryValues_ = binaryValues_; + } else { + result.binaryValues_ = binaryValuesBuilder_.build(); + } + } + + private void buildPartial0(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants result) { + int from_bitField0_ = bitField0_; + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants) { + return mergeFrom((org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants other) { + if (other == org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants.getDefaultInstance()) return this; + if (binaryValuesBuilder_ == null) { + if 
(!other.binaryValues_.isEmpty()) { + if (binaryValues_.isEmpty()) { + binaryValues_ = other.binaryValues_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureBinaryValuesIsMutable(); + binaryValues_.addAll(other.binaryValues_); + } + onChanged(); + } + } else { + if (!other.binaryValues_.isEmpty()) { + if (binaryValuesBuilder_.isEmpty()) { + binaryValuesBuilder_.dispose(); + binaryValuesBuilder_ = null; + binaryValues_ = other.binaryValues_; + bitField0_ = (bitField0_ & ~0x00000001); + binaryValuesBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getBinaryValuesFieldBuilder() : null; + } else { + binaryValuesBuilder_.addAllMessages(other.binaryValues_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode m = + input.readMessage( + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.parser(), + extensionRegistry); + if (binaryValuesBuilder_ == null) { + ensureBinaryValuesIsMutable(); + binaryValues_.add(m); + } else { + binaryValuesBuilder_.addMessage(m); + } + break; + } // case 10 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + 
private int bitField0_; + + private java.util.List binaryValues_ = + java.util.Collections.emptyList(); + private void ensureBinaryValuesIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + binaryValues_ = new java.util.ArrayList(binaryValues_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder> binaryValuesBuilder_; + + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public java.util.List getBinaryValuesList() { + if (binaryValuesBuilder_ == null) { + return java.util.Collections.unmodifiableList(binaryValues_); + } else { + return binaryValuesBuilder_.getMessageList(); + } + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public int getBinaryValuesCount() { + if (binaryValuesBuilder_ == null) { + return binaryValues_.size(); + } else { + return binaryValuesBuilder_.getCount(); + } + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode getBinaryValues(int index) { + if (binaryValuesBuilder_ == null) { + return binaryValues_.get(index); + } else { + return binaryValuesBuilder_.getMessage(index); + } + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public Builder setBinaryValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode value) { + if (binaryValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureBinaryValuesIsMutable(); + binaryValues_.set(index, value); + onChanged(); + } else { + binaryValuesBuilder_.setMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public Builder setBinaryValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder 
builderForValue) { + if (binaryValuesBuilder_ == null) { + ensureBinaryValuesIsMutable(); + binaryValues_.set(index, builderForValue.build()); + onChanged(); + } else { + binaryValuesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public Builder addBinaryValues(org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode value) { + if (binaryValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureBinaryValuesIsMutable(); + binaryValues_.add(value); + onChanged(); + } else { + binaryValuesBuilder_.addMessage(value); + } + return this; + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public Builder addBinaryValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode value) { + if (binaryValuesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureBinaryValuesIsMutable(); + binaryValues_.add(index, value); + onChanged(); + } else { + binaryValuesBuilder_.addMessage(index, value); + } + return this; + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public Builder addBinaryValues( + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder builderForValue) { + if (binaryValuesBuilder_ == null) { + ensureBinaryValuesIsMutable(); + binaryValues_.add(builderForValue.build()); + onChanged(); + } else { + binaryValuesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public Builder addBinaryValues( + int index, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder builderForValue) { + if (binaryValuesBuilder_ == null) { + ensureBinaryValuesIsMutable(); + binaryValues_.add(index, builderForValue.build()); + onChanged(); + } else { + binaryValuesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * repeated 
.gandiva.types.BinaryNode binaryValues = 1; + */ + public Builder addAllBinaryValues( + java.lang.Iterable values) { + if (binaryValuesBuilder_ == null) { + ensureBinaryValuesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, binaryValues_); + onChanged(); + } else { + binaryValuesBuilder_.addAllMessages(values); + } + return this; + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public Builder clearBinaryValues() { + if (binaryValuesBuilder_ == null) { + binaryValues_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + binaryValuesBuilder_.clear(); + } + return this; + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public Builder removeBinaryValues(int index) { + if (binaryValuesBuilder_ == null) { + ensureBinaryValuesIsMutable(); + binaryValues_.remove(index); + onChanged(); + } else { + binaryValuesBuilder_.remove(index); + } + return this; + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder getBinaryValuesBuilder( + int index) { + return getBinaryValuesFieldBuilder().getBuilder(index); + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder getBinaryValuesOrBuilder( + int index) { + if (binaryValuesBuilder_ == null) { + return binaryValues_.get(index); } else { + return binaryValuesBuilder_.getMessageOrBuilder(index); + } + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public java.util.List + getBinaryValuesOrBuilderList() { + if (binaryValuesBuilder_ != null) { + return binaryValuesBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(binaryValues_); + } + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public 
org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder addBinaryValuesBuilder() { + return getBinaryValuesFieldBuilder().addBuilder( + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder addBinaryValuesBuilder( + int index) { + return getBinaryValuesFieldBuilder().addBuilder( + index, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.getDefaultInstance()); + } + /** + * repeated .gandiva.types.BinaryNode binaryValues = 1; + */ + public java.util.List + getBinaryValuesBuilderList() { + return getBinaryValuesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder> + getBinaryValuesFieldBuilder() { + if (binaryValuesBuilder_ == null) { + binaryValuesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNode.Builder, org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryNodeOrBuilder>( + binaryValues_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + binaryValues_ = null; + } + return binaryValuesBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:gandiva.types.BinaryConstants) + } + + // @@protoc_insertion_point(class_scope:gandiva.types.BinaryConstants) + private static final 
org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants(); + } + + public static org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public BinaryConstants parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.apache.arrow.gandiva.ipc.GandivaTypes.BinaryConstants getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_ExtGandivaType_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_ExtGandivaType_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_Field_descriptor; + private static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_Field_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_FieldNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_FieldNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_FunctionNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_FunctionNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_IfNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_IfNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_AndNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_AndNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_OrNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_OrNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_NullNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_NullNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_IntNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_IntNode_fieldAccessorTable; + private static final 
com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_FloatNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_FloatNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_DoubleNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_DoubleNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_BooleanNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_BooleanNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_LongNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_LongNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_StringNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_StringNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_BinaryNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_BinaryNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_DecimalNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_DecimalNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_TreeNode_descriptor; + private static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_TreeNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_ExpressionRoot_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_ExpressionRoot_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_ExpressionList_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_ExpressionList_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_Condition_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_Condition_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_Schema_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_Schema_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_GandivaDataTypes_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_GandivaDataTypes_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_GandivaFunctions_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_GandivaFunctions_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_FunctionSignature_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internal_static_gandiva_types_FunctionSignature_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_InNode_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_InNode_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_IntConstants_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_IntConstants_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_LongConstants_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_LongConstants_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_DecimalConstants_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_DecimalConstants_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_FloatConstants_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_FloatConstants_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_DoubleConstants_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_DoubleConstants_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_StringConstants_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_StringConstants_fieldAccessorTable; + private static 
final com.google.protobuf.Descriptors.Descriptor + internal_static_gandiva_types_BinaryConstants_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_gandiva_types_BinaryConstants_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\023gandiva/types.proto\022\rgandiva.types\"\221\003\n" + + "\016ExtGandivaType\022-\n\004type\030\001 \001(\0162\032.gandiva." + + "types.GandivaTypeH\000\210\001\001\022\022\n\005width\030\002 \001(\rH\001\210" + + "\001\001\022\026\n\tprecision\030\003 \001(\005H\002\210\001\001\022\022\n\005scale\030\004 \001(" + + "\005H\003\210\001\001\022.\n\010dateUnit\030\005 \001(\0162\027.gandiva.types" + + ".DateUnitH\004\210\001\001\022.\n\010timeUnit\030\006 \001(\0162\027.gandi" + + "va.types.TimeUnitH\005\210\001\001\022\025\n\010timeZone\030\007 \001(\t" + + "H\006\210\001\001\0226\n\014intervalType\030\010 \001(\0162\033.gandiva.ty" + + "pes.IntervalTypeH\007\210\001\001B\007\n\005_typeB\010\n\006_width" + + "B\014\n\n_precisionB\010\n\006_scaleB\013\n\t_dateUnitB\013\n" + + "\t_timeUnitB\013\n\t_timeZoneB\017\n\r_intervalType" + + "\"\252\001\n\005Field\022\021\n\004name\030\001 \001(\tH\000\210\001\001\0220\n\004type\030\002 " + + "\001(\0132\035.gandiva.types.ExtGandivaTypeH\001\210\001\001\022" + + "\025\n\010nullable\030\003 \001(\010H\002\210\001\001\022&\n\010children\030\004 \003(\013" + + "2\024.gandiva.types.FieldB\007\n\005_nameB\007\n\005_type" + + "B\013\n\t_nullable\"?\n\tFieldNode\022(\n\005field\030\001 \001(" + + "\0132\024.gandiva.types.FieldH\000\210\001\001B\010\n\006_field\"\252" + + "\001\n\014FunctionNode\022\031\n\014functionName\030\001 \001(\tH\000\210" + + "\001\001\022\'\n\006inArgs\030\002 \003(\0132\027.gandiva.types.TreeN" + + 
"ode\0226\n\nreturnType\030\003 \001(\0132\035.gandiva.types." + + "ExtGandivaTypeH\001\210\001\001B\017\n\r_functionNameB\r\n\013" + + "_returnType\"\376\001\n\006IfNode\022*\n\004cond\030\001 \001(\0132\027.g" + + "andiva.types.TreeNodeH\000\210\001\001\022.\n\010thenNode\030\002" + + " \001(\0132\027.gandiva.types.TreeNodeH\001\210\001\001\022.\n\010el" + + "seNode\030\003 \001(\0132\027.gandiva.types.TreeNodeH\002\210" + + "\001\001\0226\n\nreturnType\030\004 \001(\0132\035.gandiva.types.E" + + "xtGandivaTypeH\003\210\001\001B\007\n\005_condB\013\n\t_thenNode" + + "B\013\n\t_elseNodeB\r\n\013_returnType\"0\n\007AndNode\022" + + "%\n\004args\030\001 \003(\0132\027.gandiva.types.TreeNode\"/" + + "\n\006OrNode\022%\n\004args\030\001 \003(\0132\027.gandiva.types.T" + + "reeNode\"E\n\010NullNode\0220\n\004type\030\001 \001(\0132\035.gand" + + "iva.types.ExtGandivaTypeH\000\210\001\001B\007\n\005_type\"\'" + + "\n\007IntNode\022\022\n\005value\030\001 \001(\005H\000\210\001\001B\010\n\006_value\"" + + ")\n\tFloatNode\022\022\n\005value\030\001 \001(\002H\000\210\001\001B\010\n\006_val" + + "ue\"*\n\nDoubleNode\022\022\n\005value\030\001 \001(\001H\000\210\001\001B\010\n\006" + + "_value\"+\n\013BooleanNode\022\022\n\005value\030\001 \001(\010H\000\210\001" + + "\001B\010\n\006_value\"(\n\010LongNode\022\022\n\005value\030\001 \001(\003H\000" + + "\210\001\001B\010\n\006_value\"*\n\nStringNode\022\022\n\005value\030\001 \001" + + "(\014H\000\210\001\001B\010\n\006_value\"*\n\nBinaryNode\022\022\n\005value" + + "\030\001 \001(\014H\000\210\001\001B\010\n\006_value\"o\n\013DecimalNode\022\022\n\005" + + "value\030\001 \001(\tH\000\210\001\001\022\026\n\tprecision\030\002 \001(\005H\001\210\001\001" + + "\022\022\n\005scale\030\003 \001(\005H\002\210\001\001B\010\n\006_valueB\014\n\n_preci" + + "sionB\010\n\006_scale\"\257\007\n\010TreeNode\0220\n\tfieldNode" + + "\030\001 \001(\0132\030.gandiva.types.FieldNodeH\000\210\001\001\0220\n" + + "\006fnNode\030\002 
\001(\0132\033.gandiva.types.FunctionNo" + + "deH\001\210\001\001\022*\n\006ifNode\030\006 \001(\0132\025.gandiva.types." + + "IfNodeH\002\210\001\001\022,\n\007andNode\030\007 \001(\0132\026.gandiva.t" + + "ypes.AndNodeH\003\210\001\001\022*\n\006orNode\030\010 \001(\0132\025.gand" + + "iva.types.OrNodeH\004\210\001\001\022.\n\010nullNode\030\013 \001(\0132" + + "\027.gandiva.types.NullNodeH\005\210\001\001\022,\n\007intNode" + + "\030\014 \001(\0132\026.gandiva.types.IntNodeH\006\210\001\001\0220\n\tf" + + "loatNode\030\r \001(\0132\030.gandiva.types.FloatNode" + + "H\007\210\001\001\022.\n\010longNode\030\016 \001(\0132\027.gandiva.types." + + "LongNodeH\010\210\001\001\0224\n\013booleanNode\030\017 \001(\0132\032.gan" + + "diva.types.BooleanNodeH\t\210\001\001\0222\n\ndoubleNod" + + "e\030\020 \001(\0132\031.gandiva.types.DoubleNodeH\n\210\001\001\022" + + "2\n\nstringNode\030\021 \001(\0132\031.gandiva.types.Stri" + + "ngNodeH\013\210\001\001\0222\n\nbinaryNode\030\022 \001(\0132\031.gandiv" + + "a.types.BinaryNodeH\014\210\001\001\0224\n\013decimalNode\030\023" + + " \001(\0132\032.gandiva.types.DecimalNodeH\r\210\001\001\022*\n" + + "\006inNode\030\025 \001(\0132\025.gandiva.types.InNodeH\016\210\001" + + "\001B\014\n\n_fieldNodeB\t\n\007_fnNodeB\t\n\007_ifNodeB\n\n" + + "\010_andNodeB\t\n\007_orNodeB\013\n\t_nullNodeB\n\n\010_in" + + "tNodeB\014\n\n_floatNodeB\013\n\t_longNodeB\016\n\014_boo" + + "leanNodeB\r\n\013_doubleNodeB\r\n\013_stringNodeB\r" + + "\n\013_binaryNodeB\016\n\014_decimalNodeB\t\n\007_inNode" + + "\"\203\001\n\016ExpressionRoot\022*\n\004root\030\001 \001(\0132\027.gand" + + "iva.types.TreeNodeH\000\210\001\001\022-\n\nresultType\030\002 " + + "\001(\0132\024.gandiva.types.FieldH\001\210\001\001B\007\n\005_rootB" + + "\r\n\013_resultType\">\n\016ExpressionList\022,\n\005expr" + + "s\030\002 \003(\0132\035.gandiva.types.ExpressionRoot\"@" + + "\n\tCondition\022*\n\004root\030\001 \001(\0132\027.gandiva.type" + + 
"s.TreeNodeH\000\210\001\001B\007\n\005_root\"/\n\006Schema\022%\n\007co" + + "lumns\030\001 \003(\0132\024.gandiva.types.Field\"C\n\020Gan" + + "divaDataTypes\022/\n\010dataType\030\001 \003(\0132\035.gandiv" + + "a.types.ExtGandivaType\"F\n\020GandivaFunctio" + + "ns\0222\n\010function\030\001 \003(\0132 .gandiva.types.Fun" + + "ctionSignature\"\251\001\n\021FunctionSignature\022\021\n\004" + + "name\030\001 \001(\tH\000\210\001\001\0226\n\nreturnType\030\002 \001(\0132\035.ga" + + "ndiva.types.ExtGandivaTypeH\001\210\001\001\0221\n\nparam" + + "Types\030\003 \003(\0132\035.gandiva.types.ExtGandivaTy" + + "peB\007\n\005_nameB\r\n\013_returnType\"\302\004\n\006InNode\022*\n" + + "\004node\030\001 \001(\0132\027.gandiva.types.TreeNodeH\000\210\001" + + "\001\0223\n\tintValues\030\002 \001(\0132\033.gandiva.types.Int" + + "ConstantsH\001\210\001\001\0225\n\nlongValues\030\003 \001(\0132\034.gan" + + "diva.types.LongConstantsH\002\210\001\001\0229\n\014stringV" + + "alues\030\004 \001(\0132\036.gandiva.types.StringConsta" + + "ntsH\003\210\001\001\0229\n\014binaryValues\030\005 \001(\0132\036.gandiva" + + ".types.BinaryConstantsH\004\210\001\001\022;\n\rdecimalVa" + + "lues\030\006 \001(\0132\037.gandiva.types.DecimalConsta" + + "ntsH\005\210\001\001\0227\n\013floatValues\030\007 \001(\0132\035.gandiva." + + "types.FloatConstantsH\006\210\001\001\0229\n\014doubleValue" + + "s\030\010 \001(\0132\036.gandiva.types.DoubleConstantsH" + + "\007\210\001\001B\007\n\005_nodeB\014\n\n_intValuesB\r\n\013_longValu" + + "esB\017\n\r_stringValuesB\017\n\r_binaryValuesB\020\n\016" + + "_decimalValuesB\016\n\014_floatValuesB\017\n\r_doubl" + + "eValues\"9\n\014IntConstants\022)\n\tintValues\030\001 \003" + + "(\0132\026.gandiva.types.IntNode\"<\n\rLongConsta" + + "nts\022+\n\nlongValues\030\001 \003(\0132\027.gandiva.types." 
+ + "LongNode\"E\n\020DecimalConstants\0221\n\rdecimalV" + + "alues\030\001 \003(\0132\032.gandiva.types.DecimalNode\"" + + "?\n\016FloatConstants\022-\n\013floatValues\030\001 \003(\0132\030" + + ".gandiva.types.FloatNode\"B\n\017DoubleConsta" + + "nts\022/\n\014doubleValues\030\001 \003(\0132\031.gandiva.type" + + "s.DoubleNode\"B\n\017StringConstants\022/\n\014strin" + + "gValues\030\001 \003(\0132\031.gandiva.types.StringNode" + + "\"B\n\017BinaryConstants\022/\n\014binaryValues\030\001 \003(" + + "\0132\031.gandiva.types.BinaryNode*\343\002\n\013Gandiva" + + "Type\022\010\n\004NONE\020\000\022\010\n\004BOOL\020\001\022\t\n\005UINT8\020\002\022\010\n\004I" + + "NT8\020\003\022\n\n\006UINT16\020\004\022\t\n\005INT16\020\005\022\n\n\006UINT32\020\006" + + "\022\t\n\005INT32\020\007\022\n\n\006UINT64\020\010\022\t\n\005INT64\020\t\022\016\n\nHA" + + "LF_FLOAT\020\n\022\t\n\005FLOAT\020\013\022\n\n\006DOUBLE\020\014\022\010\n\004UTF" + + "8\020\r\022\n\n\006BINARY\020\016\022\025\n\021FIXED_SIZE_BINARY\020\017\022\n" + + "\n\006DATE32\020\020\022\n\n\006DATE64\020\021\022\r\n\tTIMESTAMP\020\022\022\n\n" + + "\006TIME32\020\023\022\n\n\006TIME64\020\024\022\014\n\010INTERVAL\020\025\022\013\n\007D" + + "ECIMAL\020\026\022\010\n\004LIST\020\027\022\n\n\006STRUCT\020\030\022\t\n\005UNION\020" + + "\031\022\016\n\nDICTIONARY\020\032\022\007\n\003MAP\020\033*\036\n\010DateUnit\022\007" + + "\n\003DAY\020\000\022\t\n\005MILLI\020\001*<\n\010TimeUnit\022\007\n\003SEC\020\000\022" + + "\014\n\010MILLISEC\020\001\022\014\n\010MICROSEC\020\002\022\013\n\007NANOSEC\020\003" + + "*,\n\014IntervalType\022\016\n\nYEAR_MONTH\020\000\022\014\n\010DAY_" + + "TIME\020\001*>\n\023SelectionVectorType\022\013\n\007SV_NONE" + + "\020\000\022\014\n\010SV_INT16\020\001\022\014\n\010SV_INT32\020\002B.\n\034org.ap" + + "ache.arrow.gandiva.ipcB\014GandivaTypesH\001b\006" + + "proto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + 
.internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }); + internal_static_gandiva_types_ExtGandivaType_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_gandiva_types_ExtGandivaType_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_ExtGandivaType_descriptor, + new java.lang.String[] { "Type", "Width", "Precision", "Scale", "DateUnit", "TimeUnit", "TimeZone", "IntervalType", "Type", "Width", "Precision", "Scale", "DateUnit", "TimeUnit", "TimeZone", "IntervalType", }); + internal_static_gandiva_types_Field_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_gandiva_types_Field_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_Field_descriptor, + new java.lang.String[] { "Name", "Type", "Nullable", "Children", "Name", "Type", "Nullable", }); + internal_static_gandiva_types_FieldNode_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_gandiva_types_FieldNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_FieldNode_descriptor, + new java.lang.String[] { "Field", "Field", }); + internal_static_gandiva_types_FunctionNode_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_gandiva_types_FunctionNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_FunctionNode_descriptor, + new java.lang.String[] { "FunctionName", "InArgs", "ReturnType", "FunctionName", "ReturnType", }); + internal_static_gandiva_types_IfNode_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_gandiva_types_IfNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_IfNode_descriptor, + new 
java.lang.String[] { "Cond", "ThenNode", "ElseNode", "ReturnType", "Cond", "ThenNode", "ElseNode", "ReturnType", }); + internal_static_gandiva_types_AndNode_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_gandiva_types_AndNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_AndNode_descriptor, + new java.lang.String[] { "Args", }); + internal_static_gandiva_types_OrNode_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_gandiva_types_OrNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_OrNode_descriptor, + new java.lang.String[] { "Args", }); + internal_static_gandiva_types_NullNode_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_gandiva_types_NullNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_NullNode_descriptor, + new java.lang.String[] { "Type", "Type", }); + internal_static_gandiva_types_IntNode_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_gandiva_types_IntNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_IntNode_descriptor, + new java.lang.String[] { "Value", "Value", }); + internal_static_gandiva_types_FloatNode_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_gandiva_types_FloatNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_FloatNode_descriptor, + new java.lang.String[] { "Value", "Value", }); + internal_static_gandiva_types_DoubleNode_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_gandiva_types_DoubleNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_gandiva_types_DoubleNode_descriptor, + new java.lang.String[] { "Value", "Value", }); + internal_static_gandiva_types_BooleanNode_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_gandiva_types_BooleanNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_BooleanNode_descriptor, + new java.lang.String[] { "Value", "Value", }); + internal_static_gandiva_types_LongNode_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_gandiva_types_LongNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_LongNode_descriptor, + new java.lang.String[] { "Value", "Value", }); + internal_static_gandiva_types_StringNode_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_gandiva_types_StringNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_StringNode_descriptor, + new java.lang.String[] { "Value", "Value", }); + internal_static_gandiva_types_BinaryNode_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_gandiva_types_BinaryNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_BinaryNode_descriptor, + new java.lang.String[] { "Value", "Value", }); + internal_static_gandiva_types_DecimalNode_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_gandiva_types_DecimalNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_DecimalNode_descriptor, + new java.lang.String[] { "Value", "Precision", "Scale", "Value", "Precision", "Scale", }); + internal_static_gandiva_types_TreeNode_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_gandiva_types_TreeNode_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_TreeNode_descriptor, + new java.lang.String[] { "FieldNode", "FnNode", "IfNode", "AndNode", "OrNode", "NullNode", "IntNode", "FloatNode", "LongNode", "BooleanNode", "DoubleNode", "StringNode", "BinaryNode", "DecimalNode", "InNode", "FieldNode", "FnNode", "IfNode", "AndNode", "OrNode", "NullNode", "IntNode", "FloatNode", "LongNode", "BooleanNode", "DoubleNode", "StringNode", "BinaryNode", "DecimalNode", "InNode", }); + internal_static_gandiva_types_ExpressionRoot_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_gandiva_types_ExpressionRoot_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_ExpressionRoot_descriptor, + new java.lang.String[] { "Root", "ResultType", "Root", "ResultType", }); + internal_static_gandiva_types_ExpressionList_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_gandiva_types_ExpressionList_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_ExpressionList_descriptor, + new java.lang.String[] { "Exprs", }); + internal_static_gandiva_types_Condition_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_gandiva_types_Condition_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_Condition_descriptor, + new java.lang.String[] { "Root", "Root", }); + internal_static_gandiva_types_Schema_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_gandiva_types_Schema_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_Schema_descriptor, + new java.lang.String[] { "Columns", }); + internal_static_gandiva_types_GandivaDataTypes_descriptor = + getDescriptor().getMessageTypes().get(21); + 
internal_static_gandiva_types_GandivaDataTypes_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_GandivaDataTypes_descriptor, + new java.lang.String[] { "DataType", }); + internal_static_gandiva_types_GandivaFunctions_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_gandiva_types_GandivaFunctions_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_GandivaFunctions_descriptor, + new java.lang.String[] { "Function", }); + internal_static_gandiva_types_FunctionSignature_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_gandiva_types_FunctionSignature_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_FunctionSignature_descriptor, + new java.lang.String[] { "Name", "ReturnType", "ParamTypes", "Name", "ReturnType", }); + internal_static_gandiva_types_InNode_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_gandiva_types_InNode_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_InNode_descriptor, + new java.lang.String[] { "Node", "IntValues", "LongValues", "StringValues", "BinaryValues", "DecimalValues", "FloatValues", "DoubleValues", "Node", "IntValues", "LongValues", "StringValues", "BinaryValues", "DecimalValues", "FloatValues", "DoubleValues", }); + internal_static_gandiva_types_IntConstants_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_gandiva_types_IntConstants_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_IntConstants_descriptor, + new java.lang.String[] { "IntValues", }); + internal_static_gandiva_types_LongConstants_descriptor = + getDescriptor().getMessageTypes().get(26); + 
internal_static_gandiva_types_LongConstants_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_LongConstants_descriptor, + new java.lang.String[] { "LongValues", }); + internal_static_gandiva_types_DecimalConstants_descriptor = + getDescriptor().getMessageTypes().get(27); + internal_static_gandiva_types_DecimalConstants_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_DecimalConstants_descriptor, + new java.lang.String[] { "DecimalValues", }); + internal_static_gandiva_types_FloatConstants_descriptor = + getDescriptor().getMessageTypes().get(28); + internal_static_gandiva_types_FloatConstants_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_FloatConstants_descriptor, + new java.lang.String[] { "FloatValues", }); + internal_static_gandiva_types_DoubleConstants_descriptor = + getDescriptor().getMessageTypes().get(29); + internal_static_gandiva_types_DoubleConstants_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_DoubleConstants_descriptor, + new java.lang.String[] { "DoubleValues", }); + internal_static_gandiva_types_StringConstants_descriptor = + getDescriptor().getMessageTypes().get(30); + internal_static_gandiva_types_StringConstants_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_StringConstants_descriptor, + new java.lang.String[] { "StringValues", }); + internal_static_gandiva_types_BinaryConstants_descriptor = + getDescriptor().getMessageTypes().get(31); + internal_static_gandiva_types_BinaryConstants_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_gandiva_types_BinaryConstants_descriptor, + new java.lang.String[] { "BinaryValues", }); + } + + // 
@@protoc_insertion_point(outer_class_scope) +} diff --git a/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/any.proto b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/any.proto new file mode 100644 index 000000000000..eff44e5099da --- /dev/null +++ b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/any.proto @@ -0,0 +1,162 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/known/anypb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "AnyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// `Any` contains an arbitrary serialized protocol buffer message along with a +// URL that describes the type of the serialized message. +// +// Protobuf library provides support to pack/unpack Any values in the form +// of utility functions or additional generated methods of the Any type. +// +// Example 1: Pack and unpack a message in C++. +// +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } +// +// Example 2: Pack and unpack a message in Java. +// +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... 
+// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } +// +// The pack methods provided by protobuf library will by default use +// 'type.googleapis.com/full.type.name' as the type URL and the unpack +// methods only use the fully qualified type name after the last '/' +// in the type URL, for example "foo.bar.com/x/y.z" will yield type +// name "y.z". +// +// JSON +// ==== +// The JSON representation of an `Any` value uses the regular +// representation of the deserialized, embedded message, with an +// additional field `@type` which contains the type URL. Example: +// +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } +// +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } +// +// If the embedded message type is well-known and has a custom JSON +// representation, that representation will be embedded adding a field +// `value` which holds the custom JSON in addition to the `@type` +// field. Example (for message [google.protobuf.Duration][]): +// +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// +message Any { + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. 
+ // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + // + string type_url = 1; + + // Must be a valid serialized protocol buffer of the above specified type. + bytes value = 2; +} diff --git a/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/api.proto b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/api.proto new file mode 100644 index 000000000000..422235167018 --- /dev/null +++ b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/api.proto @@ -0,0 +1,207 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +import "google/protobuf/source_context.proto"; +import "google/protobuf/type.proto"; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "ApiProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/apipb"; + +// Api is a light-weight descriptor for an API Interface. 
+// +// Interfaces are also described as "protocol buffer services" in some contexts, +// such as by the "service" keyword in a .proto file, but they are different +// from API Services, which represent a concrete implementation of an interface +// as opposed to simply a description of methods and bindings. They are also +// sometimes simply referred to as "APIs" in other contexts, such as the name of +// this message itself. See https://cloud.google.com/apis/design/glossary for +// detailed terminology. +message Api { + // The fully qualified name of this interface, including package name + // followed by the interface's simple name. + string name = 1; + + // The methods of this interface, in unspecified order. + repeated Method methods = 2; + + // Any metadata attached to the interface. + repeated Option options = 3; + + // A version string for this interface. If specified, must have the form + // `major-version.minor-version`, as in `1.10`. If the minor version is + // omitted, it defaults to zero. If the entire version field is empty, the + // major version is derived from the package name, as outlined below. If the + // field is not empty, the version in the package name will be verified to be + // consistent with what is provided here. + // + // The versioning schema uses [semantic + // versioning](http://semver.org) where the major version number + // indicates a breaking change and the minor version an additive, + // non-breaking change. Both version numbers are signals to users + // what to expect from different versions, and should be carefully + // chosen based on the product plan. + // + // The major version is also reflected in the package name of the + // interface, which must end in `v`, as in + // `google.feature.v1`. For major versions 0 and 1, the suffix can + // be omitted. Zero major versions must only be used for + // experimental, non-GA interfaces. 
+ // + string version = 4; + + // Source context for the protocol buffer service represented by this + // message. + SourceContext source_context = 5; + + // Included interfaces. See [Mixin][]. + repeated Mixin mixins = 6; + + // The source syntax of the service. + Syntax syntax = 7; +} + +// Method represents a method of an API interface. +message Method { + // The simple name of this method. + string name = 1; + + // A URL of the input message type. + string request_type_url = 2; + + // If true, the request is streamed. + bool request_streaming = 3; + + // The URL of the output message type. + string response_type_url = 4; + + // If true, the response is streamed. + bool response_streaming = 5; + + // Any metadata attached to the method. + repeated Option options = 6; + + // The source syntax of this method. + Syntax syntax = 7; +} + +// Declares an API Interface to be included in this interface. The including +// interface must redeclare all the methods from the included interface, but +// documentation and options are inherited as follows: +// +// - If after comment and whitespace stripping, the documentation +// string of the redeclared method is empty, it will be inherited +// from the original method. +// +// - Each annotation belonging to the service config (http, +// visibility) which is not set in the redeclared method will be +// inherited. +// +// - If an http annotation is inherited, the path pattern will be +// modified as follows. Any version prefix will be replaced by the +// version of the including interface plus the [root][] path if +// specified. +// +// Example of a simple mixin: +// +// package google.acl.v1; +// service AccessControl { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v1/{resource=**}:getAcl"; +// } +// } +// +// package google.storage.v2; +// service Storage { +// rpc GetAcl(GetAclRequest) returns (Acl); +// +// // Get a data record. 
+// rpc GetData(GetDataRequest) returns (Data) { +// option (google.api.http).get = "/v2/{resource=**}"; +// } +// } +// +// Example of a mixin configuration: +// +// apis: +// - name: google.storage.v2.Storage +// mixins: +// - name: google.acl.v1.AccessControl +// +// The mixin construct implies that all methods in `AccessControl` are +// also declared with same name and request/response types in +// `Storage`. A documentation generator or annotation processor will +// see the effective `Storage.GetAcl` method after inherting +// documentation and annotations as follows: +// +// service Storage { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v2/{resource=**}:getAcl"; +// } +// ... +// } +// +// Note how the version in the path pattern changed from `v1` to `v2`. +// +// If the `root` field in the mixin is specified, it should be a +// relative path under which inherited HTTP paths are placed. Example: +// +// apis: +// - name: google.storage.v2.Storage +// mixins: +// - name: google.acl.v1.AccessControl +// root: acls +// +// This implies the following inherited HTTP annotation: +// +// service Storage { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; +// } +// ... +// } +message Mixin { + // The fully qualified name of the interface which is included. + string name = 1; + + // If non-empty specifies a path under which inherited HTTP paths + // are rooted. 
+ string root = 2; +} diff --git a/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/descriptor.proto b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/descriptor.proto new file mode 100644 index 000000000000..474864353a9c --- /dev/null +++ b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/descriptor.proto @@ -0,0 +1,1218 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + +syntax = "proto2"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/descriptorpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// The full set of known editions. +enum Edition { + // A placeholder for an unknown edition value. + EDITION_UNKNOWN = 0; + + // Legacy syntax "editions". These pre-date editions, but behave much like + // distinct editions. These can't be used to specify the edition of proto + // files, but feature definitions must supply proto2/proto3 defaults for + // backwards compatibility. 
+ EDITION_PROTO2 = 998; + EDITION_PROTO3 = 999; + + // Editions that have been released. The specific values are arbitrary and + // should not be depended on, but they will always be time-ordered for easy + // comparison. + EDITION_2023 = 1000; + + // Placeholder editions for testing feature resolution. These should not be + // used or relyed on outside of tests. + EDITION_1_TEST_ONLY = 1; + EDITION_2_TEST_ONLY = 2; + EDITION_99997_TEST_ONLY = 99997; + EDITION_99998_TEST_ONLY = 99998; + EDITION_99999_TEST_ONLY = 99999; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2", "proto3", and "editions". + // + // If `edition` is present, this value must be "editions". + optional string syntax = 12; + + // The edition of the proto file. + optional Edition edition = 14; +} + +// Describes a message type. 
+message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. + repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + message Declaration { + // The extension number declared within the extension range. + optional int32 number = 1; + + // The fully-qualified name of the extension field. There must be a leading + // dot in front of the full name. + optional string full_name = 2; + + // The fully-qualified type name of the extension field. Unlike + // Metadata.type, Declaration.type must have a leading dot for messages + // and enums. + optional string type = 3; + + // If true, indicates that the number is reserved in the extension range, + // and any extension field with the number will fail to compile. Set this + // when a declared extension field is deleted. + optional bool reserved = 5; + + // If true, indicates that the extension must be defined as repeated. 
+ // Otherwise the extension must be defined as optional. + optional bool repeated = 6; + + reserved 4; // removed is_repeated + } + + // For external users: DO NOT USE. We are in the process of open sourcing + // extension declaration and executing internal cleanups before it can be + // used externally. + repeated Declaration declaration = 2 [retention = RETENTION_SOURCE]; + + // Any features defined in the specific edition. + optional FeatureSet features = 50; + + // The verification state of the extension range. + enum VerificationState { + // All the extensions of the range must be declared. + DECLARATION = 0; + UNVERIFIED = 1; + } + + // The verification state of the range. + // TODO: flip the default to DECLARATION once all empty ranges + // are marked as UNVERIFIED. + optional VerificationState verification = 3 [default = UNVERIFIED]; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported after google.protobuf. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. In Editions, the group wire format + // can be enabled via the `message_encoding` feature. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. 
+ TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + } + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REPEATED = 3; + // The required label is only allowed in google.protobuf. In proto3 and Editions + // it's explicitly prohibited. In Editions, the `field_presence` feature + // can be used to get this behavior. + LABEL_REQUIRED = 2; + } + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. 
+ optional string json_name = 10; + + optional FieldOptions options = 8; + + // If true, this is a proto3 "optional". When a proto3 field is optional, it + // tracks presence regardless of field type. + // + // When proto3_optional is true, this field must be belong to a oneof to + // signal to old proto3 clients that presence is tracked for this field. This + // oneof is known as a "synthetic" oneof, and this field must be its sole + // member (each proto3 optional field gets its own synthetic oneof). Synthetic + // oneofs exist in the descriptor only, and do not generate any API. Synthetic + // oneofs must be ordered after all "real" oneofs. + // + // For message fields, proto3_optional doesn't create any semantic change, + // since non-repeated message fields always track presence. However it still + // indicates the semantic detail of whether the user wrote "optional" or not. + // This can be useful for round-tripping the .proto file. For consistency we + // give message fields a synthetic oneof also, even though it is not required + // to track presence. This is especially important because the parser can't + // tell if a field is a message or an enum, so it must always create a + // synthetic oneof. + // + // Proto2 optional fields do not set this flag, because they already indicate + // optional with `LABEL_OPTIONAL`. + optional bool proto3_optional = 17; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. 
+ // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. + repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default = false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default = false]; +} + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. 
+// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. +// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + // Controls the name of the wrapper Java class generated for the .proto file. 
+ // That class will always contain the .proto file's getDescriptor() method as + // well as any top-level extensions defined in the .proto file. + // If java_multiple_files is disabled, then all the other classes from the + // .proto file will be nested inside the single wrapper outer class. + optional string java_outer_classname = 8; + + // If enabled, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the wrapper class + // named by java_outer_classname. However, the wrapper class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default = false]; + + // This option does nothing. + optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + optional bool java_string_check_utf8 = 27 [default = false]; + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default = SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. 
+ // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default = false]; + optional bool java_generic_services = 17 [default = false]; + optional bool py_generic_services = 18 [default = false]; + optional bool php_generic_services = 42 [default = false]; + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default = false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default = true]; + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. + optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. 
When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. When this option is empty, the proto file name will be + // used for determining the namespace. + optional string php_metadata_namespace = 44; + + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + optional string ruby_package = 45; + + // Any features defined in the specific edition. + optional FeatureSet features = 50; + + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. 
+ // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default = false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default = false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default = false]; + + reserved 4, 5, 6; + + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + // + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementations still need to work as + // if the field is a repeated message field. 
+ optional bool map_entry = 7; + + reserved 8; // javalite_serializable + reserved 9; // javanano_as_lite + + // Enable the legacy handling of JSON field name conflicts. This lowercases + // and strips underscored from the fields before comparison in proto3 only. + // The new behavior takes `json_name` into account and applies to proto2 as + // well. + // + // This should only be used as a temporary measure against broken builds due + // to the change in behavior for JSON field name conflicts. + // + // TODO This is legacy behavior we plan to remove once downstream + // teams have had time to migrate. + optional bool deprecated_legacy_json_field_conflicts = 11 [deprecated = true]; + + // Any features defined in the specific edition. + optional FeatureSet features = 12; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is only implemented to support use of + // [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of + // type "bytes" in the open source release -- sorry, we'll try to include + // other types in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + // The option [ctype=CORD] may be applied to a non-repeated field of type + // "bytes". It indicates that in C++, the data should be stored in a Cord + // instead of a string. For very large strings, this may reduce memory + // fragmentation. It may also allow better performance when parsing from a + // Cord, or when parsing with aliasing enabled, as the parsed Cord may then + // alias the original buffer. 
+ CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. This option is prohibited in + // Editions, but the `repeated_field_encoding` feature can be used to control + // the behavior. + optional bool packed = 2; + + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + optional JSType jstype = 6 [default = JS_NORMAL]; + enum JSType { + // Use the default type. + JS_NORMAL = 0; + + // Use JavaScript strings. + JS_STRING = 1; + + // Use JavaScript numbers. + JS_NUMBER = 2; + } + + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. 
However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + // + // As of May 2022, lazy verifies the contents of the byte stream during + // parsing. An invalid byte stream will cause the overall parsing to fail. + optional bool lazy = 5 [default = false]; + + // unverified_lazy does no correctness checks on the byte stream. This should + // only be used where lazy with verification is prohibitive for performance + // reasons. + optional bool unverified_lazy = 15 [default = false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. 
+ optional bool deprecated = 3 [default = false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default = false]; + + // Indicate that the field value should not be printed out when using debug + // formats, e.g. when the field contains sensitive credentials. + optional bool debug_redact = 16 [default = false]; + + // If set to RETENTION_SOURCE, the option will be omitted from the binary. + // Note: as of January 2023, support for this is in progress and does not yet + // have an effect (b/264593489). + enum OptionRetention { + RETENTION_UNKNOWN = 0; + RETENTION_RUNTIME = 1; + RETENTION_SOURCE = 2; + } + + optional OptionRetention retention = 17; + + // This indicates the types of entities that the field may apply to when used + // as an option. If it is unset, then the field may be freely used as an + // option on any kind of entity. Note: as of January 2023, support for this is + // in progress and does not yet have an effect (b/264593489). + enum OptionTargetType { + TARGET_TYPE_UNKNOWN = 0; + TARGET_TYPE_FILE = 1; + TARGET_TYPE_EXTENSION_RANGE = 2; + TARGET_TYPE_MESSAGE = 3; + TARGET_TYPE_FIELD = 4; + TARGET_TYPE_ONEOF = 5; + TARGET_TYPE_ENUM = 6; + TARGET_TYPE_ENUM_ENTRY = 7; + TARGET_TYPE_SERVICE = 8; + TARGET_TYPE_METHOD = 9; + } + + repeated OptionTargetType targets = 19; + + message EditionDefault { + optional Edition edition = 3; + optional string value = 2; // Textproto value. + } + repeated EditionDefault edition_defaults = 20; + + // Any features defined in the specific edition. + optional FeatureSet features = 21; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. 
+ extensions 1000 to max; + + reserved 4; // removed jtype + reserved 18; // reserve target, target_obsolete_do_not_use +} + +message OneofOptions { + // Any features defined in the specific edition. + optional FeatureSet features = 1; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + optional bool deprecated = 3 [default = false]; + + reserved 5; // javanano_as_lite + + // Enable the legacy handling of JSON field name conflicts. This lowercases + // and strips underscored from the fields before comparison in proto3 only. + // The new behavior takes `json_name` into account and applies to proto2 as + // well. + // TODO Remove this legacy behavior once downstream teams have + // had time to migrate. + optional bool deprecated_legacy_json_field_conflicts = 6 [deprecated = true]; + + // Any features defined in the specific edition. + optional FeatureSet features = 7; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. 
+ optional bool deprecated = 1 [default = false]; + + // Any features defined in the specific edition. + optional FeatureSet features = 2; + + // Indicate that fields annotated with this enum value should not be printed + // out when using debug formats, e.g. when the field contains sensitive + // credentials. + optional bool debug_redact = 3 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Any features defined in the specific edition. + optional FeatureSet features = 34; + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this method deprecated? 
+ // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + optional bool deprecated = 33 [default = false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = 34 + [default = IDEMPOTENCY_UNKNOWN]; + + // Any features defined in the specific edition. + optional FeatureSet features = 35; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + // "foo.(bar.baz).moo". 
+ message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Features + +// TODO Enums in C++ gencode (and potentially other languages) are +// not well scoped. This means that each of the feature enums below can clash +// with each other. The short names we've chosen maximize call-site +// readability, but leave us very open to this scenario. A future feature will +// be designed and implemented to handle this, hopefully before we ever hit a +// conflict here. +message FeatureSet { + enum FieldPresence { + FIELD_PRESENCE_UNKNOWN = 0; + EXPLICIT = 1; + IMPLICIT = 2; + LEGACY_REQUIRED = 3; + } + optional FieldPresence field_presence = 1 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "EXPLICIT" }, + edition_defaults = { edition: EDITION_PROTO3, value: "IMPLICIT" }, + edition_defaults = { edition: EDITION_2023, value: "EXPLICIT" } + ]; + + enum EnumType { + ENUM_TYPE_UNKNOWN = 0; + OPEN = 1; + CLOSED = 2; + } + optional EnumType enum_type = 2 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_ENUM, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "CLOSED" }, + edition_defaults = { edition: EDITION_PROTO3, value: "OPEN" } + ]; + + enum RepeatedFieldEncoding { + REPEATED_FIELD_ENCODING_UNKNOWN = 0; + PACKED = 1; + EXPANDED = 2; + } + optional RepeatedFieldEncoding repeated_field_encoding = 3 [ + retention = 
RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "EXPANDED" }, + edition_defaults = { edition: EDITION_PROTO3, value: "PACKED" } + ]; + + enum Utf8Validation { + UTF8_VALIDATION_UNKNOWN = 0; + NONE = 1; + VERIFY = 2; + } + optional Utf8Validation utf8_validation = 4 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "NONE" }, + edition_defaults = { edition: EDITION_PROTO3, value: "VERIFY" } + ]; + + enum MessageEncoding { + MESSAGE_ENCODING_UNKNOWN = 0; + LENGTH_PREFIXED = 1; + DELIMITED = 2; + } + optional MessageEncoding message_encoding = 5 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "LENGTH_PREFIXED" } + ]; + + enum JsonFormat { + JSON_FORMAT_UNKNOWN = 0; + ALLOW = 1; + LEGACY_BEST_EFFORT = 2; + } + optional JsonFormat json_format = 6 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_MESSAGE, + targets = TARGET_TYPE_ENUM, + targets = TARGET_TYPE_FILE, + edition_defaults = { edition: EDITION_PROTO2, value: "LEGACY_BEST_EFFORT" }, + edition_defaults = { edition: EDITION_PROTO3, value: "ALLOW" } + ]; + + reserved 999; + + extensions 1000; // for Protobuf C++ + extensions 1001; // for Protobuf Java + + extensions 9995 to 9999; // For internal testing +} + +// A compiled specification for the defaults of a set of features. These +// messages are generated from FeatureSet extensions and can be used to seed +// feature resolution. The resolution with this object becomes a simple search +// for the closest matching edition, followed by proto merges. +message FeatureSetDefaults { + // A map from every known edition with a unique set of defaults to its + // defaults. Not all editions may be contained here. 
For a given edition, + // the defaults at the closest matching edition ordered at or before it should + // be used. This field must be in strict ascending order by edition. + message FeatureSetEditionDefault { + optional Edition edition = 3; + optional FeatureSet features = 2; + } + repeated FeatureSetEditionDefault defaults = 1; + + // The minimum supported edition (inclusive) when this was constructed. + // Editions before this will not have defaults. + optional Edition minimum_edition = 4; + + // The maximum known edition (inclusive) when this was constructed. Editions + // after this will not have reliable defaults. + optional Edition maximum_edition = 5; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. 
For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendant. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition occurs. 
+ // For example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed = true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed = true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. 
+ // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to moo. + // // + // // Another line attached to moo. + // optional double moo = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to moo or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed = true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified object. 
The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + optional int32 end = 4; + + // Represents the identified object's effect on the element in the original + // .proto file. + enum Semantic { + // There is no effect or the effect is indescribable. + NONE = 0; + // The element is set or otherwise mutated. + SET = 1; + // An alias to the element is returned. + ALIAS = 2; + } + optional Semantic semantic = 5; + } +} diff --git a/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/duration.proto b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/duration.proto new file mode 100644 index 000000000000..41f40c22247d --- /dev/null +++ b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/duration.proto @@ -0,0 +1,115 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/durationpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DurationProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// A Duration represents a signed, fixed-length span of time represented +// as a count of seconds and fractions of seconds at nanosecond +// resolution. It is independent of any calendar and concepts like "day" +// or "month". It is related to Timestamp in that the difference between +// two Timestamp values is a Duration and it can be added or subtracted +// from a Timestamp. Range is approximately +-10,000 years. +// +// # Examples +// +// Example 1: Compute Duration from two Timestamps in pseudo code. 
+// +// Timestamp start = ...; +// Timestamp end = ...; +// Duration duration = ...; +// +// duration.seconds = end.seconds - start.seconds; +// duration.nanos = end.nanos - start.nanos; +// +// if (duration.seconds < 0 && duration.nanos > 0) { +// duration.seconds += 1; +// duration.nanos -= 1000000000; +// } else if (duration.seconds > 0 && duration.nanos < 0) { +// duration.seconds -= 1; +// duration.nanos += 1000000000; +// } +// +// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. +// +// Timestamp start = ...; +// Duration duration = ...; +// Timestamp end = ...; +// +// end.seconds = start.seconds + duration.seconds; +// end.nanos = start.nanos + duration.nanos; +// +// if (end.nanos < 0) { +// end.seconds -= 1; +// end.nanos += 1000000000; +// } else if (end.nanos >= 1000000000) { +// end.seconds += 1; +// end.nanos -= 1000000000; +// } +// +// Example 3: Compute Duration from datetime.timedelta in Python. +// +// td = datetime.timedelta(days=3, minutes=10) +// duration = Duration() +// duration.FromTimedelta(td) +// +// # JSON Mapping +// +// In JSON format, the Duration type is encoded as a string rather than an +// object, where the string ends in the suffix "s" (indicating seconds) and +// is preceded by the number of seconds, with nanoseconds expressed as +// fractional seconds. For example, 3 seconds with 0 nanoseconds should be +// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +// microsecond should be expressed in JSON format as "3.000001s". +// +message Duration { + // Signed seconds of the span of time. Must be from -315,576,000,000 + // to +315,576,000,000 inclusive. Note: these bounds are computed from: + // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + int64 seconds = 1; + + // Signed fractions of a second at nanosecond resolution of the span + // of time. 
Durations less than one second are represented with a 0 + // `seconds` field and a positive or negative `nanos` field. For durations + // of one second or more, a non-zero value for the `nanos` field must be + // of the same sign as the `seconds` field. Must be from -999,999,999 + // to +999,999,999 inclusive. + int32 nanos = 2; +} diff --git a/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/empty.proto b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/empty.proto new file mode 100644 index 000000000000..b87c89dcfce9 --- /dev/null +++ b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/empty.proto @@ -0,0 +1,51 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/known/emptypb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "EmptyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; + +// A generic empty message that you can re-use to avoid defining duplicated +// empty messages in your APIs. A typical example is to use it as the request +// or the response type of an API method. For instance: +// +// service Foo { +// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); +// } +// +message Empty {} diff --git a/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/field_mask.proto b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/field_mask.proto new file mode 100644 index 000000000000..b28334b94392 --- /dev/null +++ b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/field_mask.proto @@ -0,0 +1,245 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto3"; + +package google.protobuf; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "FieldMaskProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/fieldmaskpb"; +option cc_enable_arenas = true; + +// `FieldMask` represents a set of symbolic field paths, for example: +// +// paths: "f.a" +// paths: "f.b.d" +// +// Here `f` represents a field in some root message, `a` and `b` +// fields in the message found in `f`, and `d` a field found in the +// message in `f.b`. +// +// Field masks are used to specify a subset of fields that should be +// returned by a get operation or modified by an update operation. +// Field masks also have a custom JSON encoding (see below). +// +// # Field Masks in Projections +// +// When used in the context of a projection, a response message or +// sub-message is filtered by the API to only contain those fields as +// specified in the mask. For example, if the mask in the previous +// example is applied to a response message as follows: +// +// f { +// a : 22 +// b { +// d : 1 +// x : 2 +// } +// y : 13 +// } +// z: 8 +// +// The result will not contain specific values for fields x,y and z +// (their value will be set to the default, and omitted in proto text +// output): +// +// +// f { +// a : 22 +// b { +// d : 1 +// } +// } +// +// A repeated field is not allowed except at the last position of a +// paths string. +// +// If a FieldMask object is not present in a get operation, the +// operation applies to all fields (as if a FieldMask of all fields +// had been specified). +// +// Note that a field mask does not necessarily apply to the +// top-level response message. 
In case of a REST get operation, the +// field mask applies directly to the response, but in case of a REST +// list operation, the mask instead applies to each individual message +// in the returned resource list. In case of a REST custom method, +// other definitions may be used. Where the mask applies will be +// clearly documented together with its declaration in the API. In +// any case, the effect on the returned resource/resources is required +// behavior for APIs. +// +// # Field Masks in Update Operations +// +// A field mask in update operations specifies which fields of the +// targeted resource are going to be updated. The API is required +// to only change the values of the fields as specified in the mask +// and leave the others untouched. If a resource is passed in to +// describe the updated values, the API ignores the values of all +// fields not covered by the mask. +// +// If a repeated field is specified for an update operation, new values will +// be appended to the existing repeated field in the target resource. Note that +// a repeated field is only allowed in the last position of a `paths` string. +// +// If a sub-message is specified in the last position of the field mask for an +// update operation, then new value will be merged into the existing sub-message +// in the target resource. +// +// For example, given the target message: +// +// f { +// b { +// d: 1 +// x: 2 +// } +// c: [1] +// } +// +// And an update message: +// +// f { +// b { +// d: 10 +// } +// c: [2] +// } +// +// then if the field mask is: +// +// paths: ["f.b", "f.c"] +// +// then the result will be: +// +// f { +// b { +// d: 10 +// x: 2 +// } +// c: [1, 2] +// } +// +// An implementation may provide options to override this default behavior for +// repeated and message fields. +// +// In order to reset a field's value to the default, the field must +// be in the mask and set to the default value in the provided resource. 
+// Hence, in order to reset all fields of a resource, provide a default +// instance of the resource and set all fields in the mask, or do +// not provide a mask as described below. +// +// If a field mask is not present on update, the operation applies to +// all fields (as if a field mask of all fields has been specified). +// Note that in the presence of schema evolution, this may mean that +// fields the client does not know and has therefore not filled into +// the request will be reset to their default. If this is unwanted +// behavior, a specific service may require a client to always specify +// a field mask, producing an error if not. +// +// As with get operations, the location of the resource which +// describes the updated values in the request message depends on the +// operation kind. In any case, the effect of the field mask is +// required to be honored by the API. +// +// ## Considerations for HTTP REST +// +// The HTTP kind of an update operation which uses a field mask must +// be set to PATCH instead of PUT in order to satisfy HTTP semantics +// (PUT must only be used for full updates). +// +// # JSON Encoding of Field Masks +// +// In JSON, a field mask is encoded as a single string where paths are +// separated by a comma. Fields name in each path are converted +// to/from lower-camel naming conventions. +// +// As an example, consider the following message declarations: +// +// message Profile { +// User user = 1; +// Photo photo = 2; +// } +// message User { +// string display_name = 1; +// string address = 2; +// } +// +// In proto a field mask for `Profile` may look as such: +// +// mask { +// paths: "user.display_name" +// paths: "photo" +// } +// +// In JSON, the same mask is represented as below: +// +// { +// mask: "user.displayName,photo" +// } +// +// # Field Masks and Oneof Fields +// +// Field masks treat fields in oneofs just as regular fields. 
Consider the +// following message: +// +// message SampleMessage { +// oneof test_oneof { +// string name = 4; +// SubMessage sub_message = 9; +// } +// } +// +// The field mask can be: +// +// mask { +// paths: "name" +// } +// +// Or: +// +// mask { +// paths: "sub_message" +// } +// +// Note that oneof type names ("test_oneof" in this case) cannot be used in +// paths. +// +// ## Field Mask Verification +// +// The implementation of any API method which has a FieldMask type field in the +// request should verify the included field paths, and return an +// `INVALID_ARGUMENT` error if any path is unmappable. +message FieldMask { + // The set of field mask paths. + repeated string paths = 1; +} diff --git a/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/source_context.proto b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/source_context.proto new file mode 100644 index 000000000000..135f50fea51c --- /dev/null +++ b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/source_context.proto @@ -0,0 +1,48 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "SourceContextProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/sourcecontextpb"; + +// `SourceContext` represents information about the source of a +// protobuf element, like the file in which it is defined. +message SourceContext { + // The path-qualified name of the .proto file that contained the associated + // protobuf element. For example: `"google/protobuf/source_context.proto"`. 
+ string file_name = 1; +} diff --git a/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/struct.proto b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/struct.proto new file mode 100644 index 000000000000..1bf0c1ad9586 --- /dev/null +++ b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/struct.proto @@ -0,0 +1,95 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/structpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "StructProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// `Struct` represents a structured data value, consisting of fields +// which map to dynamically typed values. In some languages, `Struct` +// might be supported by a native representation. For example, in +// scripting languages like JS a struct is represented as an +// object. The details of that representation are described together +// with the proto support for the language. +// +// The JSON representation for `Struct` is JSON object. +message Struct { + // Unordered map of dynamically typed values. + map fields = 1; +} + +// `Value` represents a dynamically typed value which can be either +// null, a number, a string, a boolean, a recursive struct value, or a +// list of values. A producer of value is expected to set one of these +// variants. Absence of any variant indicates an error. +// +// The JSON representation for `Value` is JSON value. +message Value { + // The kind of value. + oneof kind { + // Represents a null value. + NullValue null_value = 1; + // Represents a double value. + double number_value = 2; + // Represents a string value. 
+ string string_value = 3; + // Represents a boolean value. + bool bool_value = 4; + // Represents a structured value. + Struct struct_value = 5; + // Represents a repeated `Value`. + ListValue list_value = 6; + } +} + +// `NullValue` is a singleton enumeration to represent the null value for the +// `Value` type union. +// +// The JSON representation for `NullValue` is JSON `null`. +enum NullValue { + // Null value. + NULL_VALUE = 0; +} + +// `ListValue` is a wrapper around a repeated field of values. +// +// The JSON representation for `ListValue` is JSON array. +message ListValue { + // Repeated field of dynamically typed values. + repeated Value values = 1; +} diff --git a/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/timestamp.proto b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/timestamp.proto new file mode 100644 index 000000000000..fd0bc07dc3c9 --- /dev/null +++ b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/timestamp.proto @@ -0,0 +1,144 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/timestamppb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "TimestampProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// A Timestamp represents a point in time independent of any time zone or local +// calendar, encoded as a count of seconds and fractions of seconds at +// nanosecond resolution. The count is relative to an epoch at UTC midnight on +// January 1, 1970, in the proleptic Gregorian calendar which extends the +// Gregorian calendar backwards to year one. +// +// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap +// second table is needed for interpretation, using a [24-hour linear +// smear](https://developers.google.com/time/smear). +// +// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By +// restricting to that range, we ensure that we can convert to and from [RFC +// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. 
+// +// # Examples +// +// Example 1: Compute Timestamp from POSIX `time()`. +// +// Timestamp timestamp; +// timestamp.set_seconds(time(NULL)); +// timestamp.set_nanos(0); +// +// Example 2: Compute Timestamp from POSIX `gettimeofday()`. +// +// struct timeval tv; +// gettimeofday(&tv, NULL); +// +// Timestamp timestamp; +// timestamp.set_seconds(tv.tv_sec); +// timestamp.set_nanos(tv.tv_usec * 1000); +// +// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. +// +// FILETIME ft; +// GetSystemTimeAsFileTime(&ft); +// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; +// +// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z +// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. +// Timestamp timestamp; +// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); +// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); +// +// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. +// +// long millis = System.currentTimeMillis(); +// +// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) +// .setNanos((int) ((millis % 1000) * 1000000)).build(); +// +// Example 5: Compute Timestamp from Java `Instant.now()`. +// +// Instant now = Instant.now(); +// +// Timestamp timestamp = +// Timestamp.newBuilder().setSeconds(now.getEpochSecond()) +// .setNanos(now.getNano()).build(); +// +// Example 6: Compute Timestamp from current time in Python. +// +// timestamp = Timestamp() +// timestamp.GetCurrentTime() +// +// # JSON Mapping +// +// In JSON format, the Timestamp type is encoded as a string in the +// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the +// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +// where {year} is always expressed using four digits while {month}, {day}, +// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +// seconds, which can go up to 9 digits (i.e. 
up to 1 nanosecond resolution), +// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +// is required. A proto3 JSON serializer should always use UTC (as indicated by +// "Z") when printing the Timestamp type and a proto3 JSON parser should be +// able to accept both UTC and other timezones (as indicated by an offset). +// +// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +// 01:30 UTC on January 15, 2017. +// +// In JavaScript, one can convert a Date object to this format using the +// standard +// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) +// method. In Python, a standard `datetime.datetime` object can be converted +// to this format using +// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with +// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use +// the Joda Time's [`ISODateTimeFormat.dateTime()`]( +// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() +// ) to obtain a formatter capable of generating timestamps in this format. +// +message Timestamp { + // Represents seconds of UTC time since Unix epoch + // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + // 9999-12-31T23:59:59Z inclusive. + int64 seconds = 1; + + // Non-negative fractions of a second at nanosecond resolution. Negative + // second values with fractions must still have non-negative nanos values + // that count forward in time. Must be from 0 to 999,999,999 + // inclusive. 
+ int32 nanos = 2; +} diff --git a/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/type.proto b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/type.proto new file mode 100644 index 000000000000..48cb11e75518 --- /dev/null +++ b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/type.proto @@ -0,0 +1,193 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +import "google/protobuf/any.proto"; +import "google/protobuf/source_context.proto"; + +option cc_enable_arenas = true; +option java_package = "com.google.protobuf"; +option java_outer_classname = "TypeProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/typepb"; + +// A protocol buffer message type. +message Type { + // The fully qualified message name. + string name = 1; + // The list of fields. + repeated Field fields = 2; + // The list of types appearing in `oneof` definitions in this type. + repeated string oneofs = 3; + // The protocol buffer options. + repeated Option options = 4; + // The source context. + SourceContext source_context = 5; + // The source syntax. + Syntax syntax = 6; + // The source edition string, only valid when syntax is SYNTAX_EDITIONS. + string edition = 7; +} + +// A single field of a message type. +message Field { + // Basic field types. + enum Kind { + // Field type unknown. + TYPE_UNKNOWN = 0; + // Field type double. + TYPE_DOUBLE = 1; + // Field type float. + TYPE_FLOAT = 2; + // Field type int64. + TYPE_INT64 = 3; + // Field type uint64. + TYPE_UINT64 = 4; + // Field type int32. + TYPE_INT32 = 5; + // Field type fixed64. + TYPE_FIXED64 = 6; + // Field type fixed32. 
+ TYPE_FIXED32 = 7; + // Field type bool. + TYPE_BOOL = 8; + // Field type string. + TYPE_STRING = 9; + // Field type group. Proto2 syntax only, and deprecated. + TYPE_GROUP = 10; + // Field type message. + TYPE_MESSAGE = 11; + // Field type bytes. + TYPE_BYTES = 12; + // Field type uint32. + TYPE_UINT32 = 13; + // Field type enum. + TYPE_ENUM = 14; + // Field type sfixed32. + TYPE_SFIXED32 = 15; + // Field type sfixed64. + TYPE_SFIXED64 = 16; + // Field type sint32. + TYPE_SINT32 = 17; + // Field type sint64. + TYPE_SINT64 = 18; + } + + // Whether a field is optional, required, or repeated. + enum Cardinality { + // For fields with unknown cardinality. + CARDINALITY_UNKNOWN = 0; + // For optional fields. + CARDINALITY_OPTIONAL = 1; + // For required fields. Proto2 syntax only. + CARDINALITY_REQUIRED = 2; + // For repeated fields. + CARDINALITY_REPEATED = 3; + } + + // The field type. + Kind kind = 1; + // The field cardinality. + Cardinality cardinality = 2; + // The field number. + int32 number = 3; + // The field name. + string name = 4; + // The field type URL, without the scheme, for message or enumeration + // types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. + string type_url = 6; + // The index of the field type in `Type.oneofs`, for message or enumeration + // types. The first type has index 1; zero means the type is not in the list. + int32 oneof_index = 7; + // Whether to use alternative packed wire representation. + bool packed = 8; + // The protocol buffer options. + repeated Option options = 9; + // The field JSON name. + string json_name = 10; + // The string value of the default value of this field. Proto2 syntax only. + string default_value = 11; +} + +// Enum type definition. +message Enum { + // Enum type name. + string name = 1; + // Enum value definitions. + repeated EnumValue enumvalue = 2; + // Protocol buffer options. + repeated Option options = 3; + // The source context. 
+ SourceContext source_context = 4; + // The source syntax. + Syntax syntax = 5; + // The source edition string, only valid when syntax is SYNTAX_EDITIONS. + string edition = 6; +} + +// Enum value definition. +message EnumValue { + // Enum value name. + string name = 1; + // Enum value number. + int32 number = 2; + // Protocol buffer options. + repeated Option options = 3; +} + +// A protocol buffer option, which can be attached to a message, field, +// enumeration, etc. +message Option { + // The option's name. For protobuf built-in options (options defined in + // descriptor.proto), this is the short name. For example, `"map_entry"`. + // For custom options, it should be the fully-qualified name. For example, + // `"google.api.http"`. + string name = 1; + // The option's value packed in an Any message. If the value is a primitive, + // the corresponding wrapper type defined in google/protobuf/wrappers.proto + // should be used. If the value is an enum, it should be stored as an int32 + // value using the google.protobuf.Int32Value type. + Any value = 2; +} + +// The syntax in which a protocol buffer element is defined. +enum Syntax { + // Syntax `proto2`. + SYNTAX_PROTO2 = 0; + // Syntax `proto3`. + SYNTAX_PROTO3 = 1; + // Syntax `editions`. + SYNTAX_EDITIONS = 2; +} diff --git a/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/wrappers.proto b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/wrappers.proto new file mode 100644 index 000000000000..1959fa55a4e7 --- /dev/null +++ b/java/gandiva/target/protoc-dependencies/e63609fcacf52260efe2b231a85b86fe/google/protobuf/wrappers.proto @@ -0,0 +1,123 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Wrappers for primitive (non-message) types. These types are useful +// for embedding primitives in the `google.protobuf.Any` type and for places +// where we need to distinguish between the absence of a primitive +// typed field and its default value. +// +// These wrappers have no meaningful use within repeated fields as they lack +// the ability to detect presence on individual elements. 
+// These wrappers have no meaningful use within a map or a oneof since +// individual entries of a map or fields of a oneof can already detect presence. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/wrapperspb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "WrappersProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// Wrapper message for `double`. +// +// The JSON representation for `DoubleValue` is JSON number. +message DoubleValue { + // The double value. + double value = 1; +} + +// Wrapper message for `float`. +// +// The JSON representation for `FloatValue` is JSON number. +message FloatValue { + // The float value. + float value = 1; +} + +// Wrapper message for `int64`. +// +// The JSON representation for `Int64Value` is JSON string. +message Int64Value { + // The int64 value. + int64 value = 1; +} + +// Wrapper message for `uint64`. +// +// The JSON representation for `UInt64Value` is JSON string. +message UInt64Value { + // The uint64 value. + uint64 value = 1; +} + +// Wrapper message for `int32`. +// +// The JSON representation for `Int32Value` is JSON number. +message Int32Value { + // The int32 value. + int32 value = 1; +} + +// Wrapper message for `uint32`. +// +// The JSON representation for `UInt32Value` is JSON number. +message UInt32Value { + // The uint32 value. + uint32 value = 1; +} + +// Wrapper message for `bool`. +// +// The JSON representation for `BoolValue` is JSON `true` and `false`. +message BoolValue { + // The bool value. + bool value = 1; +} + +// Wrapper message for `string`. +// +// The JSON representation for `StringValue` is JSON string. +message StringValue { + // The string value. + string value = 1; +} + +// Wrapper message for `bytes`. +// +// The JSON representation for `BytesValue` is JSON string. 
+message BytesValue { + // The bytes value. + bytes value = 1; +} diff --git a/java/gandiva/target/protoc-plugins/protoc-3.23.1-osx-aarch_64.exe b/java/gandiva/target/protoc-plugins/protoc-3.23.1-osx-aarch_64.exe new file mode 100755 index 000000000000..9c2f8c62a974 Binary files /dev/null and b/java/gandiva/target/protoc-plugins/protoc-3.23.1-osx-aarch_64.exe differ diff --git a/java/gandiva/target/test-classes/logback.xml b/java/gandiva/target/test-classes/logback.xml new file mode 100644 index 000000000000..f9e449fa67b2 --- /dev/null +++ b/java/gandiva/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + + diff --git a/java/maven/module-info-compiler-maven-plugin/target/classes/META-INF/maven/org.apache.arrow.maven.plugins/module-info-compiler-maven-plugin/plugin-help.xml b/java/maven/module-info-compiler-maven-plugin/target/classes/META-INF/maven/org.apache.arrow.maven.plugins/module-info-compiler-maven-plugin/plugin-help.xml new file mode 100644 index 000000000000..ec04da1e7e1b --- /dev/null +++ b/java/maven/module-info-compiler-maven-plugin/target/classes/META-INF/maven/org.apache.arrow.maven.plugins/module-info-compiler-maven-plugin/plugin-help.xml @@ -0,0 +1,78 @@ + + + + + + Module Info Compiler Maven Plugin + + org.apache.arrow.maven.plugins + module-info-compiler-maven-plugin + 16.1.0 + module-info-compiler + + + compile + A maven plugin for compiler module-info files in main code with JDK8. + false + true + false + false + false + true + compile + org.apache.arrow.maven.plugins.ModuleInfoCompilerPlugin + java + per-lookup + once-per-session + false + + + compileSourceRoots + java.util.List<java.lang.String> + true + true + + + + skip + boolean + false + true + + + + + ${compileSourceRoots} + ${skip} + + + + testCompile + A maven plugin for compiler module-info files in unit tests with JDK8. 
+ false + true + false + false + false + true + test-compile + org.apache.arrow.maven.plugins.ModuleInfoTestCompilerPlugin + java + per-lookup + once-per-session + false + + + skip + boolean + false + true + + + + + ${skip} + + + + \ No newline at end of file diff --git a/java/maven/module-info-compiler-maven-plugin/target/classes/META-INF/maven/plugin.xml b/java/maven/module-info-compiler-maven-plugin/target/classes/META-INF/maven/plugin.xml new file mode 100644 index 000000000000..a3df7b64abfe --- /dev/null +++ b/java/maven/module-info-compiler-maven-plugin/target/classes/META-INF/maven/plugin.xml @@ -0,0 +1,118 @@ + + + + + + Module Info Compiler Maven Plugin + + org.apache.arrow.maven.plugins + module-info-compiler-maven-plugin + 16.1.0 + module-info-compiler + false + true + 1.8 + 3.8.7 + + + compile + A maven plugin for compiler module-info files in main code with JDK8. + false + true + false + false + false + true + compile + org.apache.arrow.maven.plugins.ModuleInfoCompilerPlugin + java + per-lookup + once-per-session + false + + + compileSourceRoots + java.util.List + true + true + + + + project + org.apache.maven.project.MavenProject + true + false + + + + skip + boolean + false + true + + + + + ${compileSourceRoots} + + ${skip} + + + + testCompile + A maven plugin for compiler module-info files in unit tests with JDK8. 
+ false + true + false + false + false + true + test-compile + org.apache.arrow.maven.plugins.ModuleInfoTestCompilerPlugin + java + per-lookup + once-per-session + false + + + project + org.apache.maven.project.MavenProject + true + false + + + + skip + boolean + false + true + + + + + + ${skip} + + + + + + org.glavo + module-info-compiler + jar + 2.0 + + + com.github.javaparser + javaparser-core + jar + 3.24.2 + + + org.ow2.asm + asm + jar + 9.3 + + + \ No newline at end of file diff --git a/java/maven/module-info-compiler-maven-plugin/target/classes/arrow-git.properties b/java/maven/module-info-compiler-maven-plugin/target/classes/arrow-git.properties new file mode 100644 index 000000000000..3a0e64573788 --- /dev/null +++ b/java/maven/module-info-compiler-maven-plugin/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:29 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 
diff --git a/java/maven/module-info-compiler-maven-plugin/target/classes/org/apache/arrow/maven/plugins/BaseModuleInfoCompilerPlugin.class b/java/maven/module-info-compiler-maven-plugin/target/classes/org/apache/arrow/maven/plugins/BaseModuleInfoCompilerPlugin.class new file mode 100644 index 000000000000..2706223fd6b2 Binary files /dev/null and b/java/maven/module-info-compiler-maven-plugin/target/classes/org/apache/arrow/maven/plugins/BaseModuleInfoCompilerPlugin.class differ diff --git a/java/maven/module-info-compiler-maven-plugin/target/classes/org/apache/arrow/maven/plugins/ModuleInfoCompilerPlugin.class b/java/maven/module-info-compiler-maven-plugin/target/classes/org/apache/arrow/maven/plugins/ModuleInfoCompilerPlugin.class new file mode 100644 index 000000000000..2f67c7010ec7 Binary files /dev/null and b/java/maven/module-info-compiler-maven-plugin/target/classes/org/apache/arrow/maven/plugins/ModuleInfoCompilerPlugin.class differ diff --git a/java/maven/module-info-compiler-maven-plugin/target/classes/org/apache/arrow/maven/plugins/ModuleInfoTestCompilerPlugin.class b/java/maven/module-info-compiler-maven-plugin/target/classes/org/apache/arrow/maven/plugins/ModuleInfoTestCompilerPlugin.class new file mode 100644 index 000000000000..d13980e85af6 Binary files /dev/null and b/java/maven/module-info-compiler-maven-plugin/target/classes/org/apache/arrow/maven/plugins/ModuleInfoTestCompilerPlugin.class differ diff --git a/java/maven/module-info-compiler-maven-plugin/target/generated-sources/plugin/org/apache/arrow/maven/plugins/module_info_compiler_maven_plugin/HelpMojo.java b/java/maven/module-info-compiler-maven-plugin/target/generated-sources/plugin/org/apache/arrow/maven/plugins/module_info_compiler_maven_plugin/HelpMojo.java new file mode 100644 index 000000000000..dcfb92eac524 --- /dev/null +++ b/java/maven/module-info-compiler-maven-plugin/target/generated-sources/plugin/org/apache/arrow/maven/plugins/module_info_compiler_maven_plugin/HelpMojo.java @@ 
-0,0 +1,448 @@ +package org.apache.arrow.maven.plugins.module_info_compiler_maven_plugin; + +import org.apache.maven.plugin.AbstractMojo; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; + +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.SAXException; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; + +/** + * Display help information on module-info-compiler-maven-plugin.
+ * Call mvn module-info-compiler:help -Ddetail=true -Dgoal=<goal-name> to display parameter details. + * @author maven-plugin-tools + */ +@Mojo( name = "help", requiresProject = false, threadSafe = true ) +public class HelpMojo + extends AbstractMojo +{ + /** + * If true, display all settable properties for each goal. + * + */ + @Parameter( property = "detail", defaultValue = "false" ) + private boolean detail; + + /** + * The name of the goal for which to show help. If unspecified, all goals will be displayed. + * + */ + @Parameter( property = "goal" ) + private java.lang.String goal; + + /** + * The maximum length of a display line, should be positive. + * + */ + @Parameter( property = "lineLength", defaultValue = "80" ) + private int lineLength; + + /** + * The number of spaces per indentation level, should be positive. + * + */ + @Parameter( property = "indentSize", defaultValue = "2" ) + private int indentSize; + + // /META-INF/maven///plugin-help.xml + private static final String PLUGIN_HELP_PATH = + "/META-INF/maven/org.apache.arrow.maven.plugins/module-info-compiler-maven-plugin/plugin-help.xml"; + + private static final int DEFAULT_LINE_LENGTH = 80; + + private Document build() + throws MojoExecutionException + { + getLog().debug( "load plugin-help.xml: " + PLUGIN_HELP_PATH ); + try ( InputStream is = getClass().getResourceAsStream( PLUGIN_HELP_PATH ) ) + { + if ( is == null ) + { + throw new MojoExecutionException( "Could not find plugin descriptor at " + PLUGIN_HELP_PATH ); + } + DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); + DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); + return dBuilder.parse( is ); + } + catch ( IOException e ) + { + throw new MojoExecutionException( e.getMessage(), e ); + } + catch ( ParserConfigurationException e ) + { + throw new MojoExecutionException( e.getMessage(), e ); + } + catch ( SAXException e ) + { + throw new MojoExecutionException( e.getMessage(), e ); + } + } + + /** + * 
{@inheritDoc} + */ + @Override + public void execute() + throws MojoExecutionException + { + if ( lineLength <= 0 ) + { + getLog().warn( "The parameter 'lineLength' should be positive, using '80' as default." ); + lineLength = DEFAULT_LINE_LENGTH; + } + if ( indentSize <= 0 ) + { + getLog().warn( "The parameter 'indentSize' should be positive, using '2' as default." ); + indentSize = 2; + } + + Document doc = build(); + + StringBuilder sb = new StringBuilder(); + Node plugin = getSingleChild( doc, "plugin" ); + + + String name = getValue( plugin, "name" ); + String version = getValue( plugin, "version" ); + String id = getValue( plugin, "groupId" ) + ":" + getValue( plugin, "artifactId" ) + ":" + version; + if ( isNotEmpty( name ) && !name.contains( id ) ) + { + append( sb, name + " " + version, 0 ); + } + else + { + if ( isNotEmpty( name ) ) + { + append( sb, name, 0 ); + } + else + { + append( sb, id, 0 ); + } + } + append( sb, getValue( plugin, "description" ), 1 ); + append( sb, "", 0 ); + + //plugin + String goalPrefix = getValue( plugin, "goalPrefix" ); + + Node mojos1 = getSingleChild( plugin, "mojos" ); + + List mojos = findNamedChild( mojos1, "mojo" ); + + if ( goal == null || goal.length() <= 0 ) + { + append( sb, "This plugin has " + mojos.size() + ( mojos.size() > 1 ? 
" goals:" : " goal:" ), 0 ); + append( sb, "", 0 ); + } + + for ( Node mojo : mojos ) + { + writeGoal( sb, goalPrefix, (Element) mojo ); + } + + if ( getLog().isInfoEnabled() ) + { + getLog().info( sb.toString() ); + } + } + + + private static boolean isNotEmpty( String string ) + { + return string != null && string.length() > 0; + } + + private static String getValue( Node node, String elementName ) + throws MojoExecutionException + { + return getSingleChild( node, elementName ).getTextContent(); + } + + private static Node getSingleChild( Node node, String elementName ) + throws MojoExecutionException + { + List namedChild = findNamedChild( node, elementName ); + if ( namedChild.isEmpty() ) + { + throw new MojoExecutionException( "Could not find " + elementName + " in plugin-help.xml" ); + } + if ( namedChild.size() > 1 ) + { + throw new MojoExecutionException( "Multiple " + elementName + " in plugin-help.xml" ); + } + return namedChild.get( 0 ); + } + + private static List findNamedChild( Node node, String elementName ) + { + List result = new ArrayList(); + NodeList childNodes = node.getChildNodes(); + for ( int i = 0; i < childNodes.getLength(); i++ ) + { + Node item = childNodes.item( i ); + if ( elementName.equals( item.getNodeName() ) ) + { + result.add( item ); + } + } + return result; + } + + private static Node findSingleChild( Node node, String elementName ) + throws MojoExecutionException + { + List elementsByTagName = findNamedChild( node, elementName ); + if ( elementsByTagName.isEmpty() ) + { + return null; + } + if ( elementsByTagName.size() > 1 ) + { + throw new MojoExecutionException( "Multiple " + elementName + "in plugin-help.xml" ); + } + return elementsByTagName.get( 0 ); + } + + private void writeGoal( StringBuilder sb, String goalPrefix, Element mojo ) + throws MojoExecutionException + { + String mojoGoal = getValue( mojo, "goal" ); + Node configurationElement = findSingleChild( mojo, "configuration" ); + Node description = findSingleChild( 
mojo, "description" ); + if ( goal == null || goal.length() <= 0 || mojoGoal.equals( goal ) ) + { + append( sb, goalPrefix + ":" + mojoGoal, 0 ); + Node deprecated = findSingleChild( mojo, "deprecated" ); + if ( ( deprecated != null ) && isNotEmpty( deprecated.getTextContent() ) ) + { + append( sb, "Deprecated. " + deprecated.getTextContent(), 1 ); + if ( detail && description != null ) + { + append( sb, "", 0 ); + append( sb, description.getTextContent(), 1 ); + } + } + else if ( description != null ) + { + append( sb, description.getTextContent(), 1 ); + } + append( sb, "", 0 ); + + if ( detail ) + { + Node parametersNode = getSingleChild( mojo, "parameters" ); + List parameters = findNamedChild( parametersNode, "parameter" ); + append( sb, "Available parameters:", 1 ); + append( sb, "", 0 ); + + for ( Node parameter : parameters ) + { + writeParameter( sb, parameter, configurationElement ); + } + } + } + } + + private void writeParameter( StringBuilder sb, Node parameter, Node configurationElement ) + throws MojoExecutionException + { + String parameterName = getValue( parameter, "name" ); + String parameterDescription = getValue( parameter, "description" ); + + Element fieldConfigurationElement = null; + if ( configurationElement != null ) + { + fieldConfigurationElement = (Element) findSingleChild( configurationElement, parameterName ); + } + + String parameterDefaultValue = ""; + if ( fieldConfigurationElement != null && fieldConfigurationElement.hasAttribute( "default-value" ) ) + { + parameterDefaultValue = " (Default: " + fieldConfigurationElement.getAttribute( "default-value" ) + ")"; + } + append( sb, parameterName + parameterDefaultValue, 2 ); + Node deprecated = findSingleChild( parameter, "deprecated" ); + if ( ( deprecated != null ) && isNotEmpty( deprecated.getTextContent() ) ) + { + append( sb, "Deprecated. 
" + deprecated.getTextContent(), 3 ); + append( sb, "", 0 ); + } + if ( isNotEmpty( parameterDescription ) ) { + append( sb, parameterDescription, 3 ); + } + if ( "true".equals( getValue( parameter, "required" ) ) ) + { + append( sb, "Required: Yes", 3 ); + } + if ( ( fieldConfigurationElement != null ) && isNotEmpty( fieldConfigurationElement.getTextContent() ) ) + { + String property = getPropertyFromExpression( fieldConfigurationElement.getTextContent() ); + append( sb, "User property: " + property, 3 ); + } + + append( sb, "", 0 ); + } + + /** + *

Repeat a String n times to form a new string.

+ * + * @param str String to repeat + * @param repeat number of times to repeat str + * @return String with repeated String + * @throws NegativeArraySizeException if repeat < 0 + * @throws NullPointerException if str is null + */ + private static String repeat( String str, int repeat ) + { + StringBuilder buffer = new StringBuilder( repeat * str.length() ); + + for ( int i = 0; i < repeat; i++ ) + { + buffer.append( str ); + } + + return buffer.toString(); + } + + /** + * Append a description to the buffer by respecting the indentSize and lineLength parameters. + * Note: The last character is always a new line. + * + * @param sb The buffer to append the description, not null. + * @param description The description, not null. + * @param indent The base indentation level of each line, must not be negative. + */ + private void append( StringBuilder sb, String description, int indent ) + { + for ( String line : toLines( description, indent, indentSize, lineLength ) ) + { + sb.append( line ).append( '\n' ); + } + } + + /** + * Splits the specified text into lines of convenient display length. + * + * @param text The text to split into lines, must not be null. + * @param indent The base indentation level of each line, must not be negative. + * @param indentSize The size of each indentation, must not be negative. + * @param lineLength The length of the line, must not be negative. + * @return The sequence of display lines, never null. + * @throws NegativeArraySizeException if indent < 0 + */ + private static List toLines( String text, int indent, int indentSize, int lineLength ) + { + List lines = new ArrayList(); + + String ind = repeat( "\t", indent ); + + String[] plainLines = text.split( "(\r\n)|(\r)|(\n)" ); + + for ( String plainLine : plainLines ) + { + toLines( lines, ind + plainLine, indentSize, lineLength ); + } + + return lines; + } + + /** + * Adds the specified line to the output sequence, performing line wrapping if necessary. 
+ * + * @param lines The sequence of display lines, must not be null. + * @param line The line to add, must not be null. + * @param indentSize The size of each indentation, must not be negative. + * @param lineLength The length of the line, must not be negative. + */ + private static void toLines( List lines, String line, int indentSize, int lineLength ) + { + int lineIndent = getIndentLevel( line ); + StringBuilder buf = new StringBuilder( 256 ); + + String[] tokens = line.split( " +" ); + + for ( String token : tokens ) + { + if ( buf.length() > 0 ) + { + if ( buf.length() + token.length() >= lineLength ) + { + lines.add( buf.toString() ); + buf.setLength( 0 ); + buf.append( repeat( " ", lineIndent * indentSize ) ); + } + else + { + buf.append( ' ' ); + } + } + + for ( int j = 0; j < token.length(); j++ ) + { + char c = token.charAt( j ); + if ( c == '\t' ) + { + buf.append( repeat( " ", indentSize - buf.length() % indentSize ) ); + } + else if ( c == '\u00A0' ) + { + buf.append( ' ' ); + } + else + { + buf.append( c ); + } + } + } + lines.add( buf.toString() ); + } + + /** + * Gets the indentation level of the specified line. + * + * @param line The line whose indentation level should be retrieved, must not be null. + * @return The indentation level of the line. 
+ */ + private static int getIndentLevel( String line ) + { + int level = 0; + for ( int i = 0; i < line.length() && line.charAt( i ) == '\t'; i++ ) + { + level++; + } + for ( int i = level + 1; i <= level + 4 && i < line.length(); i++ ) + { + if ( line.charAt( i ) == '\t' ) + { + level++; + break; + } + } + return level; + } + + private static String getPropertyFromExpression( String expression ) + { + if ( expression != null && expression.startsWith( "${" ) && expression.endsWith( "}" ) + && !expression.substring( 2 ).contains( "${" ) ) + { + // expression="${xxx}" -> property="xxx" + return expression.substring( 2, expression.length() - 1 ); + } + // no property can be extracted + return null; + } +} diff --git a/java/maven/module-info-compiler-maven-plugin/target/plugin-enhanced.xml b/java/maven/module-info-compiler-maven-plugin/target/plugin-enhanced.xml new file mode 100644 index 000000000000..9a1765bb172c --- /dev/null +++ b/java/maven/module-info-compiler-maven-plugin/target/plugin-enhanced.xml @@ -0,0 +1,118 @@ + + + + + + Module Info Compiler Maven Plugin + + org.apache.arrow.maven.plugins + module-info-compiler-maven-plugin + 16.1.0 + module-info-compiler + false + true + 1.8 + 3.8.7 + + + compile + A maven plugin for compiler module-info files in main code with JDK8. + false + true + false + false + false + true + compile + org.apache.arrow.maven.plugins.ModuleInfoCompilerPlugin + java + per-lookup + once-per-session + false + + + compileSourceRoots + java.util.List<java.lang.String> + true + true + + + + project + org.apache.maven.project.MavenProject + true + false + + + + skip + boolean + false + true + + + + + ${compileSourceRoots} + + ${skip} + + + + testCompile + A maven plugin for compiler module-info files in unit tests with JDK8. 
+ false + true + false + false + false + true + test-compile + org.apache.arrow.maven.plugins.ModuleInfoTestCompilerPlugin + java + per-lookup + once-per-session + false + + + project + org.apache.maven.project.MavenProject + true + false + + + + skip + boolean + false + true + + + + + + ${skip} + + + + + + org.glavo + module-info-compiler + jar + 2.0 + + + com.github.javaparser + javaparser-core + jar + 3.24.2 + + + org.ow2.asm + asm + jar + 9.3 + + + \ No newline at end of file diff --git a/java/memory/memory-core/target/classes/arrow-git.properties b/java/memory/memory-core/target/classes/arrow-git.properties new file mode 100644 index 000000000000..5bb7b2d81258 --- /dev/null +++ b/java/memory/memory-core/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=22.08.2025 @ 15\:37\:19 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git 
a/java/memory/memory-core/target/test-classes/logback.xml b/java/memory/memory-core/target/test-classes/logback.xml new file mode 100644 index 000000000000..4c54d18a210f --- /dev/null +++ b/java/memory/memory-core/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + diff --git a/java/memory/memory-netty-buffer-patch/target/classes/arrow-git.properties b/java/memory/memory-netty-buffer-patch/target/classes/arrow-git.properties new file mode 100644 index 000000000000..90958e27cdef --- /dev/null +++ b/java/memory/memory-netty-buffer-patch/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:16 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/memory/memory-netty/target/classes/arrow-git.properties b/java/memory/memory-netty/target/classes/arrow-git.properties new file mode 
100644 index 000000000000..79129b4483ab --- /dev/null +++ b/java/memory/memory-netty/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:17 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/memory/memory-netty/target/test-classes/logback.xml b/java/memory/memory-netty/target/test-classes/logback.xml new file mode 100644 index 000000000000..4c54d18a210f --- /dev/null +++ b/java/memory/memory-netty/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + diff --git a/java/memory/memory-unsafe/target/classes/arrow-git.properties b/java/memory/memory-unsafe/target/classes/arrow-git.properties new file mode 100644 index 000000000000..1e0422e29ccf --- /dev/null +++ b/java/memory/memory-unsafe/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by 
Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:18 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/memory/memory-unsafe/target/test-classes/logback.xml b/java/memory/memory-unsafe/target/test-classes/logback.xml new file mode 100644 index 000000000000..4c54d18a210f --- /dev/null +++ b/java/memory/memory-unsafe/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + diff --git a/java/performance/target/classes/arrow-git.properties b/java/performance/target/classes/arrow-git.properties new file mode 100644 index 000000000000..5138abdecdf0 --- /dev/null +++ b/java/performance/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:20 PDT 
+git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/tools/target/classes/arrow-git.properties b/java/tools/target/classes/arrow-git.properties new file mode 100644 index 000000000000..5138abdecdf0 --- /dev/null +++ b/java/tools/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:20 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] 
Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE +git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/tools/target/test-classes/logback.xml b/java/tools/target/test-classes/logback.xml new file mode 100644 index 000000000000..ff848da2a8be --- /dev/null +++ b/java/tools/target/test-classes/logback.xml @@ -0,0 +1,27 @@ + + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + diff --git a/java/vector/target/classes/arrow-git.properties b/java/vector/target/classes/arrow-git.properties new file mode 100644 index 000000000000..c875d071416f --- /dev/null +++ b/java/vector/target/classes/arrow-git.properties @@ -0,0 +1,26 @@ +#Generated by Git-Commit-Id-Plugin +git.branch=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.build.host=Christopher-Pride-MacBook-Pro-16-inch-Nov-2023- +git.build.time=21.08.2025 @ 12\:10\:22 PDT +git.build.user.email=cpride@cpride.net +git.build.user.name=Chris Pride +git.build.version=16.1.0 +git.closest.tag.commit.count=0 +git.closest.tag.name=apache-arrow-16.1.0 +git.commit.author.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.committer.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.id=7dd1d34074af176d9e861a360e135ae57b21cf96 +git.commit.id.abbrev=7dd1d34 +git.commit.id.describe=apache-arrow-16.1.0-0-g7dd1d34 +git.commit.id.describe-short=apache-arrow-16.1.0-0 +git.commit.message.full=MINOR\: [Release] Update versions for 16.1.0 +git.commit.message.short=MINOR\: [Release] Update versions for 16.1.0 +git.commit.time=09.05.2024 @ 00\:21\:29 PDT +git.commit.user.email=raulcumplido@gmail.com +git.commit.user.name=Ra\u00FAl Cumplido +git.dirty=false +git.local.branch.ahead=NO_REMOTE +git.local.branch.behind=NO_REMOTE 
+git.remote.origin.url=git@github.com\:apache/arrow.git +git.tags=apache-arrow-16.1.0,go/v16.1.0,r-16.1.0 +git.total.commit.count=15794 diff --git a/java/vector/target/classes/codegen/config.fmpp b/java/vector/target/classes/codegen/config.fmpp new file mode 100644 index 000000000000..ef5a5072a75a --- /dev/null +++ b/java/vector/target/classes/codegen/config.fmpp @@ -0,0 +1,24 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +data: { + # TODO: Rename to ~valueVectorModesAndTypes for clarity. + vv: tdd(../data/ValueVectorTypes.tdd), + arrowTypes: tdd(../data/ArrowTypes.tdd) + +} +freemarkerLinks: { + includes: includes/ +} diff --git a/java/vector/target/classes/codegen/data/ArrowTypes.tdd b/java/vector/target/classes/codegen/data/ArrowTypes.tdd new file mode 100644 index 000000000000..3cf9a968791a --- /dev/null +++ b/java/vector/target/classes/codegen/data/ArrowTypes.tdd @@ -0,0 +1,124 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +{ + types: [ + { + name: "Null", + fields: [], + complex: false + }, + { + name: "Struct_", + fields: [], + complex: true + }, + { + name: "List", + fields: [], + complex: true + }, + { + name: "LargeList", + fields: [], + complex: true + }, + { + name: "FixedSizeList", + fields: [{name: "listSize", type: int}], + complex: true + }, + { + name: "Union", + fields: [{name: "mode", type: short, valueType: UnionMode}, {name: "typeIds", type: "int[]"}], + complex: true + }, + { + name: "Map", + fields: [{name: "keysSorted", type: boolean}], + complex: true + }, + { + name: "Int", + fields: [{name: "bitWidth", type: int}, {name: "isSigned", type: boolean}], + complex: false + }, + { + name: "FloatingPoint", + fields: [{name: precision, type: short, valueType: FloatingPointPrecision}], + complex: false + }, + { + name: "Utf8", + fields: [], + complex: false + }, + { + name: "LargeUtf8", + fields: [], + complex: false + }, + { + name: "Binary", + fields: [], + complex: false + }, + { + name: "LargeBinary", + fields: [], + complex: false + }, + { + name: "FixedSizeBinary", + fields: [{name: "byteWidth", type: int}], + complex: false + } + { + name: "Bool", + fields: [], + complex: false + }, + { + name: "Decimal", + fields: [{name: "precision", type: int}, {name: "scale", type: int}, {name: "bitWidth", type: int}], + complex: false + }, + { + name: "Date", + fields: [{name: "unit", type: short, valueType: 
DateUnit}] + complex: false + }, + { + name: "Time", + fields: [{name: "unit", type: short, valueType: TimeUnit}, {name: "bitWidth", type: int}], + complex: false + }, + { + name: "Timestamp", + fields: [{name: "unit", type: short, valueType: TimeUnit}, {name: "timezone", type: String}] + complex: false + }, + { + name: "Interval", + fields: [{name: "unit", type: short, valueType: IntervalUnit}], + complex: false + }, + { + name: "Duration", + fields: [{name: "unit", type: short, valueType: TimeUnit}], + complex: false + } + ] +} diff --git a/java/vector/target/classes/codegen/data/ValueVectorTypes.tdd b/java/vector/target/classes/codegen/data/ValueVectorTypes.tdd new file mode 100644 index 000000000000..6c2a96771245 --- /dev/null +++ b/java/vector/target/classes/codegen/data/ValueVectorTypes.tdd @@ -0,0 +1,216 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +{ + modes: [ + {name: "Optional", prefix: "Nullable"}, + {name: "Required", prefix: ""} + ], + types: [ + { + major: "Fixed", + width: 1, + javaType: "byte", + boxedType: "Byte", + fields: [{name: "value", type: "byte"}], + minor: [ + { class: "TinyInt", valueHolder: "IntHolder" }, + { class: "UInt1", valueHolder: "UInt1Holder" } + ] + }, + { + major: "Fixed", + width: 2, + javaType: "char", + boxedType: "Character", + fields: [{name: "value", type: "char"}], + minor: [ + { class: "UInt2", valueHolder: "UInt2Holder"} + ] + }, { + major: "Fixed", + width: 2, + javaType: "short", + boxedType: "Short", + fields: [{name: "value", type: "short"}], + minor: [ + { class: "SmallInt", valueHolder: "Int2Holder"}, + ] + }, + { + major: "Fixed", + width: 2, + javaType: "short", + boxedType: "Short", + fields: [{name: "value", type: "short"}], + minor: [ + { class: "Float2", valueHolder: "Int2Holder"}, + ] + }, + { + major: "Fixed", + width: 4, + javaType: "int", + boxedType: "Integer", + fields: [{name: "value", type: "int"}], + minor: [ + { class: "Int", valueHolder: "IntHolder"}, + { class: "UInt4", valueHolder: "UInt4Holder" }, + { class: "Float4", javaType: "float" , boxedType: "Float", fields: [{name: "value", type: "float"}]}, + { class: "DateDay" }, + { class: "IntervalYear", javaType: "int", friendlyType: "Period" }, + { class: "TimeSec" }, + { class: "TimeMilli", javaType: "int", friendlyType: "LocalDateTime" } + ] + }, + { + major: "Fixed", + width: 8, + javaType: "long", + boxedType: "Long", + fields: [{name: "value", type: "long"}], + minor: [ + { class: "BigInt"}, + { class: "UInt8" }, + { class: "Float8", javaType: "double", boxedType: "Double", fields: [{name: "value", type: "double"}] }, + { class: "DateMilli", javaType: "long", friendlyType: "LocalDateTime" }, + { class: "Duration", javaType: "long", friendlyType: "Duration", + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Duration", + typeParams: [ {name: "unit", type: 
"org.apache.arrow.vector.types.TimeUnit"} ], + arrowTypeConstructorParams: ["unit"]} + { class: "TimeStampSec", javaType: "long", boxedType: "Long", friendlyType: "LocalDateTime" }, + { class: "TimeStampMilli", javaType: "long", boxedType: "Long", friendlyType: "LocalDateTime" }, + { class: "TimeStampMicro", javaType: "long", boxedType: "Long", friendlyType: "LocalDateTime" }, + { class: "TimeStampNano", javaType: "long", boxedType: "Long", friendlyType: "LocalDateTime" }, + { class: "TimeStampSecTZ", javaType: "long", boxedType: "Long", + typeParams: [ {name: "timezone", type: "String"} ], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Timestamp", + arrowTypeConstructorParams: ["org.apache.arrow.vector.types.TimeUnit.SECOND", "timezone"] }, + { class: "TimeStampMilliTZ", javaType: "long", boxedType: "Long", + typeParams: [ {name: "timezone", type: "String"} ], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Timestamp", + arrowTypeConstructorParams: ["org.apache.arrow.vector.types.TimeUnit.MILLISECOND", "timezone"] }, + { class: "TimeStampMicroTZ", javaType: "long", boxedType: "Long", + typeParams: [ {name: "timezone", type: "String"} ], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Timestamp", + arrowTypeConstructorParams: ["org.apache.arrow.vector.types.TimeUnit.MICROSECOND", "timezone"] }, + { class: "TimeStampNanoTZ", javaType: "long", boxedType: "Long", + typeParams: [ {name: "timezone", type: "String"} ], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Timestamp", + arrowTypeConstructorParams: ["org.apache.arrow.vector.types.TimeUnit.NANOSECOND", "timezone"] }, + { class: "TimeMicro" }, + { class: "TimeNano" } + ] + }, + { + major: "Fixed", + width: 8, + javaType: "ArrowBuf", + boxedType: "ArrowBuf", + minor: [ + { class: "IntervalDay", millisecondsOffset: 4, friendlyType: "Duration", fields: [ {name: "days", type:"int"}, {name: "milliseconds", type:"int"}] } + ] + }, + { + major: "Fixed", + width: 16, + 
javaType: "ArrowBuf", + boxedType: "ArrowBuf", + minor: [ + { class: "IntervalMonthDayNano", daysOffset: 4, nanosecondsOffset: 8, friendlyType: "PeriodDuration", fields: [ {name: "months", type:"int"}, {name: "days", type:"int"}, {name: "nanoseconds", type:"long"}] } + ] + }, + + { + major: "Fixed", + width: 32, + javaType: "ArrowBuf", + boxedType: "ArrowBuf", + + minor: [ + { + class: "Decimal256", + maxPrecisionDigits: 76, nDecimalDigits: 4, friendlyType: "BigDecimal", + typeParams: [ {name: "scale", type: "int"}, { name: "precision", type: "int"}], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Decimal", + fields: [{name: "start", type: "long"}, {name: "buffer", type: "ArrowBuf"}] + } + ] + }, + { + major: "Fixed", + width: 16, + javaType: "ArrowBuf", + boxedType: "ArrowBuf", + + minor: [ + { + class: "Decimal", + maxPrecisionDigits: 38, nDecimalDigits: 4, friendlyType: "BigDecimal", + typeParams: [ {name: "scale", type: "int"}, { name: "precision", type: "int"}], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Decimal", + fields: [{name: "start", type: "long"}, {name: "buffer", type: "ArrowBuf"}] + } + ] + }, + + { + major: "Fixed", + width: -1, + javaType: "byte[]", + boxedType: "ArrowBuf", + minor: [ + { + class: "FixedSizeBinary", + typeParams: [ {name: "byteWidth", type: "int"} ], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeBinary", + friendlyType: "byte[]", + fields: [{name: "buffer", type: "ArrowBuf"}], + } + ] + }, + { + major: "VarLen", + width: 4, + javaType: "int", + boxedType: "ArrowBuf", + fields: [{name: "start", type: "int"}, {name: "end", type: "int"}, {name: "buffer", type: "ArrowBuf"}], + minor: [ + { class: "VarBinary" , friendlyType: "byte[]" }, + { class: "VarChar" , friendlyType: "Text" } + ] + }, + { + major: "VarLen", + width: 8, + javaType: "long", + boxedType: "ArrowBuf", + fields: [{name: "start", type: "long"}, {name: "end", type: "long"}, {name: "buffer", type: "ArrowBuf"}], + minor: [ + 
{ class: "LargeVarChar" , friendlyType: "Text" } + { class: "LargeVarBinary" , friendlyType: "byte[]" } + ] + }, + { + major: "Bit", + width: 1, + javaType: "int", + boxedType: "Integer", + minor: [ + { class: "Bit" , friendlyType: "Boolean", fields: [{name: "value", type: "int"}] } + ] + } + ] +} diff --git a/java/vector/target/classes/codegen/includes/license.ftl b/java/vector/target/classes/codegen/includes/license.ftl new file mode 100644 index 000000000000..c6a5afeef509 --- /dev/null +++ b/java/vector/target/classes/codegen/includes/license.ftl @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ \ No newline at end of file diff --git a/java/vector/target/classes/codegen/includes/vv_imports.ftl b/java/vector/target/classes/codegen/includes/vv_imports.ftl new file mode 100644 index 000000000000..f4c72a1a6cba --- /dev/null +++ b/java/vector/target/classes/codegen/includes/vv_imports.ftl @@ -0,0 +1,58 @@ +<#-- + ~ Licensed to the Apache Software Foundation (ASF) under one or more + ~ contributor license agreements. See the NOTICE file distributed with + ~ this work for additional information regarding copyright ownership. 
+ ~ The ASF licenses this file to You under the Apache License, Version 2.0 + ~ (the "License"); you may not use this file except in compliance with + ~ the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. + --> + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; 
+import java.time.Period; +import java.time.ZonedDateTime; + + diff --git a/java/vector/target/classes/codegen/templates/AbstractFieldReader.java b/java/vector/target/classes/codegen/templates/AbstractFieldReader.java new file mode 100644 index 000000000000..e3c8729469c7 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/AbstractFieldReader.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/AbstractFieldReader.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +abstract class AbstractFieldReader extends AbstractBaseReader implements FieldReader{ + + AbstractFieldReader(){ + super(); + } + + /** + * Returns true if the current value of the reader is not null + * @return whether the current value is set + */ + public boolean isSet() { + return true; + } + + @Override + public Field getField() { + fail("getField"); + return null; + } + + <#list ["Object", "BigDecimal", "Short", "Integer", "Long", "Boolean", + "LocalDateTime", "Duration", "Period", "Double", "Float", + "Character", "Text", "String", "Byte", "byte[]", "PeriodDuration"] as friendlyType> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + public ${friendlyType} read${safeType}(int arrayIndex) { + fail("read${safeType}(int arrayIndex)"); + return null; + } + + public ${friendlyType} read${safeType}() { + fail("read${safeType}()"); + return null; + } + + + public void copyAsValue(StructWriter writer) { + fail("CopyAsValue StructWriter"); + } + + public void copyAsField(String name, StructWriter writer) { + fail("CopyAsField StructWriter"); + } + + public void copyAsField(String name, ListWriter writer) { + fail("CopyAsFieldList"); + } + + public void copyAsField(String name, MapWriter writer) { + fail("CopyAsFieldMap"); + } + + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign boxedType = (minor.boxedType!type.boxedType) /> + public void read(${name}Holder holder) { + fail("${name}"); + } + + public void read(Nullable${name}Holder holder) { + fail("${name}"); + } + + public void read(int arrayIndex, 
${name}Holder holder) { + fail("Repeated${name}"); + } + + public void read(int arrayIndex, Nullable${name}Holder holder) { + fail("Repeated${name}"); + } + + public void copyAsValue(${name}Writer writer) { + fail("CopyAsValue${name}"); + } + + public void copyAsField(String name, ${name}Writer writer) { + fail("CopyAsField${name}"); + } + + + public FieldReader reader(String name) { + fail("reader(String name)"); + return null; + } + + public FieldReader reader() { + fail("reader()"); + return null; + } + + public int size() { + fail("size()"); + return -1; + } + + private void fail(String name) { + throw new IllegalArgumentException(String.format("You tried to read a [%s] type when you are using a field reader of type [%s].", name, this.getClass().getSimpleName())); + } +} + + + diff --git a/java/vector/target/classes/codegen/templates/AbstractFieldWriter.java b/java/vector/target/classes/codegen/templates/AbstractFieldWriter.java new file mode 100644 index 000000000000..6c2368117f7c --- /dev/null +++ b/java/vector/target/classes/codegen/templates/AbstractFieldWriter.java @@ -0,0 +1,261 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/AbstractFieldWriter.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/* + * This class is generated using freemarker and the ${.template_name} template. + * Note that changes to the AbstractFieldWriter template should also get reflected in the + * AbstractPromotableFieldWriter, ComplexWriters, UnionFixedSizeListWriter, UnionListWriter + * and UnionWriter templates and the PromotableWriter concrete code. + */ +@SuppressWarnings("unused") +abstract class AbstractFieldWriter extends AbstractBaseWriter implements FieldWriter { + + protected boolean addVectorAsNullable = true; + + /** + * Set flag to control the FieldType.nullable property when a writer creates a new vector. + * If true then vectors created will be nullable, this is the default behavior. If false then + * vectors created will be non-nullable. 
+ * + * @param nullable Whether or not to create nullable vectors (default behavior is true) + */ + public void setAddVectorAsNullable(boolean nullable) { + addVectorAsNullable = nullable; + } + + @Override + public void start() { + throw new IllegalStateException(String.format("You tried to start when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void end() { + throw new IllegalStateException(String.format("You tried to end when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void startList() { + throw new IllegalStateException(String.format("You tried to start a list when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void endList() { + throw new IllegalStateException(String.format("You tried to end a list when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void startMap() { + throw new IllegalStateException(String.format("You tried to start a map when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void endMap() { + throw new IllegalStateException(String.format("You tried to end a map when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void startEntry() { + throw new IllegalStateException(String.format("You tried to start a map entry when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public MapWriter key() { + throw new IllegalStateException(String.format("You tried to start a map key when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public MapWriter value() { + throw new IllegalStateException(String.format("You tried to start a map value when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + 
@Override + public void endEntry() { + throw new IllegalStateException(String.format("You tried to end a map entry when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> + @Override + public void write(${name}Holder holder) { + fail("${name}"); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + fail("${name}"); + } + + <#if minor.class?starts_with("Decimal")> + public void write${minor.class}(${friendlyType} value) { + fail("${name}"); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, , ArrowType arrowType) { + fail("${name}"); + } + + public void writeBigEndianBytesTo${minor.class}(byte[] value) { + fail("${name}"); + } + + public void writeBigEndianBytesTo${minor.class}(byte[] value, ArrowType arrowType) { + fail("${name}"); + } + + + <#if minor.class?ends_with("VarBinary")> + public void write${minor.class}(byte[] value) { + fail("${name}"); + } + + public void write${minor.class}(byte[] value, int offset, int length) { + fail("${name}"); + } + + public void write${minor.class}(ByteBuffer value) { + fail("${name}"); + } + + public void write${minor.class}(ByteBuffer value, int offset, int length) { + fail("${name}"); + } + + + <#if minor.class?ends_with("VarChar")> + public void write${minor.class}(${friendlyType} value) { + fail("${name}"); + } + + public void write${minor.class}(String value) { + fail("${name}"); + } + + + + + public void writeNull() { + fail("${name}"); + } + + /** + * This implementation returns {@code false}. + *

+ * Must be overridden by struct writers. + *

+ */ + @Override + public boolean isEmptyStruct() { + return false; + } + + @Override + public StructWriter struct() { + fail("Struct"); + return null; + } + + @Override + public ListWriter list() { + fail("List"); + return null; + } + + @Override + public MapWriter map() { + fail("Map"); + return null; + } + + @Override + public StructWriter struct(String name) { + fail("Struct"); + return null; + } + + @Override + public ListWriter list(String name) { + fail("List"); + return null; + } + + @Override + public MapWriter map(String name) { + fail("Map"); + return null; + } + + @Override + public MapWriter map(boolean keysSorted) { + fail("Map"); + return null; + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + fail("Map"); + return null; + } + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#if minor.typeParams?? 
> + + @Override + public ${capName}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + fail("${capName}(" + <#list minor.typeParams as typeParam>"${typeParam.name}: " + ${typeParam.name} + ", " + ")"); + return null; + } + + + @Override + public ${capName}Writer ${lowerName}(String name) { + fail("${capName}"); + return null; + } + + @Override + public ${capName}Writer ${lowerName}() { + fail("${capName}"); + return null; + } + + + + public void copyReader(FieldReader reader) { + fail("Copy FieldReader"); + } + + public void copyReaderToField(String name, FieldReader reader) { + fail("Copy FieldReader to STring"); + } + + private void fail(String name) { + throw new IllegalArgumentException(String.format("You tried to write a %s type when you are using a ValueWriter of type %s.", name, this.getClass().getSimpleName())); + } +} diff --git a/java/vector/target/classes/codegen/templates/AbstractPromotableFieldWriter.java b/java/vector/target/classes/codegen/templates/AbstractPromotableFieldWriter.java new file mode 100644 index 000000000000..59f9fb5b8098 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/AbstractPromotableFieldWriter.java @@ -0,0 +1,331 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/AbstractPromotableFieldWriter.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/* + * A FieldWriter which delegates calls to another FieldWriter. The delegate FieldWriter can be promoted to a new type + * when necessary. Classes that extend this class are responsible for handling promotion. + * + * This class is generated using freemarker and the ${.template_name} template. + * + */ +@SuppressWarnings("unused") +abstract class AbstractPromotableFieldWriter extends AbstractFieldWriter { + /** + * Retrieve the FieldWriter, promoting if it is not a FieldWriter of the specified type + * @param type the type of the values we want to write + * @return the corresponding field writer + */ + protected FieldWriter getWriter(MinorType type) { + return getWriter(type, null); + } + + abstract protected FieldWriter getWriter(MinorType type, ArrowType arrowType); + + /** + * @return the current FieldWriter + */ + abstract protected FieldWriter getWriter(); + + @Override + public void start() { + getWriter(MinorType.STRUCT).start(); + } + + @Override + public void end() { + getWriter(MinorType.STRUCT).end(); + setPosition(idx() + 1); + } + + @Override + public void startList() { + getWriter(MinorType.LIST).startList(); + } + + @Override + public void endList() { + getWriter(MinorType.LIST).endList(); + setPosition(idx() + 1); + } + + @Override + public void startMap() { + getWriter(MinorType.MAP).startMap(); + } + + @Override + public void endMap() { + getWriter(MinorType.MAP).endMap(); + setPosition(idx() + 1); + } + + @Override + public void startEntry() { + 
getWriter(MinorType.MAP).startEntry(); + } + + @Override + public MapWriter key() { + return getWriter(MinorType.MAP).key(); + } + + @Override + public MapWriter value() { + return getWriter(MinorType.MAP).value(); + } + + @Override + public void endEntry() { + getWriter(MinorType.MAP).endEntry(); + } + + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#if minor.class == "Decimal"> + @Override + public void write(DecimalHolder holder) { + getWriter(MinorType.DECIMAL).write(holder); + } + + public void writeDecimal(int start, ArrowBuf buffer, ArrowType arrowType) { + getWriter(MinorType.DECIMAL).writeDecimal(start, buffer, arrowType); + } + + public void writeDecimal(int start, ArrowBuf buffer) { + getWriter(MinorType.DECIMAL).writeDecimal(start, buffer); + } + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType) { + getWriter(MinorType.DECIMAL).writeBigEndianBytesToDecimal(value, arrowType); + } + + public void writeBigEndianBytesToDecimal(byte[] value) { + getWriter(MinorType.DECIMAL).writeBigEndianBytesToDecimal(value); + } + <#elseif minor.class == "Decimal256"> + @Override + public void write(Decimal256Holder holder) { + getWriter(MinorType.DECIMAL256).write(holder); + } + + public void writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType) { + getWriter(MinorType.DECIMAL256).writeDecimal256(start, buffer, arrowType); + } + + public void writeDecimal256(long start, ArrowBuf buffer) { + getWriter(MinorType.DECIMAL256).writeDecimal256(start, buffer); + } + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType) { + getWriter(MinorType.DECIMAL256).writeBigEndianBytesToDecimal256(value, arrowType); + } + + public void writeBigEndianBytesToDecimal256(byte[] value) { + getWriter(MinorType.DECIMAL256).writeBigEndianBytesToDecimal256(value); + } + <#elseif is_timestamp_tz(minor.class)> + @Override + public void 
write(${name}Holder holder) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.${name?upper_case?remove_ending("TZ")}.getType(); + // Take the holder.timezone similar to how PromotableWriter.java:write(DecimalHolder) takes the scale from the holder. + ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone); + getWriter(MinorType.${name?upper_case}, arrowType).write(holder); + } + + /** + * @deprecated + * The holder version should be used instead otherwise the timezone will default to UTC. + * @see #write(${name}Holder) + */ + @Deprecated + @Override + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.${name?upper_case?remove_ending("TZ")}.getType(); + // Assumes UTC if no timezone is provided + ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC"); + getWriter(MinorType.${name?upper_case}, arrowType).write${minor.class}(<#list fields as field>${field.name}<#if field_has_next>, ); + } + <#elseif minor.class == "Duration"> + @Override + public void write(${name}Holder holder) { + ArrowType.Duration arrowType = new ArrowType.Duration(holder.unit); + getWriter(MinorType.${name?upper_case}, arrowType).write(holder); + } + + /** + * @deprecated + * If you experience errors with using this version of the method, switch to the holder version. + * The errors occur when using an untyped or unioned PromotableWriter, because this version of the + * method does not have enough information to infer the ArrowType. 
+ * @see #write(${name}Holder) + */ + @Deprecated + @Override + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(<#list fields as field>${field.name}<#if field_has_next>, ); + } + <#elseif minor.class == "FixedSizeBinary"> + @Override + public void write(${name}Holder holder) { + ArrowType.FixedSizeBinary arrowType = new ArrowType.FixedSizeBinary(holder.byteWidth); + getWriter(MinorType.${name?upper_case}, arrowType).write(holder); + } + + /** + * @deprecated + * If you experience errors with using this version of the method, switch to the holder version. + * The errors occur when using an untyped or unioned PromotableWriter, because this version of the + * method does not have enough information to infer the ArrowType. + * @see #write(${name}Holder) + */ + @Deprecated + @Override + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(<#list fields as field>${field.name}<#if field_has_next>, ); + } + <#else> + @Override + public void write(${name}Holder holder) { + getWriter(MinorType.${name?upper_case}).write(holder); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(<#list fields as field>${field.name}<#if field_has_next>, ); + } + + + <#if minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value, offset, length); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + 
getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value, offset, length); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(Text value) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + @Override + public void write${minor.class}(String value) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + + + public void writeNull() { + } + + @Override + public StructWriter struct() { + return getWriter(MinorType.LIST).struct(); + } + + @Override + public ListWriter list() { + return getWriter(MinorType.LIST).list(); + } + + @Override + public MapWriter map() { + return getWriter(MinorType.LIST).map(); + } + + @Override + public MapWriter map(boolean keysSorted) { + return getWriter(MinorType.MAP, new ArrowType.Map(keysSorted)); + } + + @Override + public StructWriter struct(String name) { + return getWriter(MinorType.STRUCT).struct(name); + } + + @Override + public ListWriter list(String name) { + return getWriter(MinorType.STRUCT).list(name); + } + + @Override + public MapWriter map(String name) { + return getWriter(MinorType.STRUCT).map(name); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + return getWriter(MinorType.STRUCT).map(name, keysSorted); + } + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + + <#if minor.typeParams?? 
> + @Override + public ${capName}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + return getWriter(MinorType.STRUCT).${lowerName}(name<#list minor.typeParams as typeParam>, ${typeParam.name}); + } + + + @Override + public ${capName}Writer ${lowerName}(String name) { + return getWriter(MinorType.STRUCT).${lowerName}(name); + } + + @Override + public ${capName}Writer ${lowerName}() { + return getWriter(MinorType.LIST).${lowerName}(); + } + + + + public void copyReader(FieldReader reader) { + getWriter().copyReader(reader); + } + + public void copyReaderToField(String name, FieldReader reader) { + getWriter().copyReaderToField(name, reader); + } +} diff --git a/java/vector/target/classes/codegen/templates/ArrowType.java b/java/vector/target/classes/codegen/templates/ArrowType.java new file mode 100644 index 000000000000..b08d4ad0afac --- /dev/null +++ b/java/vector/target/classes/codegen/templates/ArrowType.java @@ -0,0 +1,375 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/types/pojo/ArrowType.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.types.pojo; + +import com.google.flatbuffers.FlatBufferBuilder; + +import java.util.Objects; + +import org.apache.arrow.flatbuf.Type; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.FieldVector; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +/** + * Arrow types + * Source code generated using FreeMarker template ${.template_name} + **/ +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + include = JsonTypeInfo.As.PROPERTY, + property = "name") +@JsonSubTypes({ +<#list arrowTypes.types as type> + @JsonSubTypes.Type(value = ArrowType.${type.name?remove_ending("_")}.class, name = "${type.name?remove_ending("_")?lower_case}"), + +}) +public abstract class ArrowType { + + public static abstract class PrimitiveType extends ArrowType { + + private PrimitiveType() { + } + + @Override + public boolean isComplex() { + return false; + } + } + + public static abstract class ComplexType extends ArrowType { + + private ComplexType() { + } + + @Override + public boolean isComplex() { + return true; + } + } + + public static enum ArrowTypeID { + <#list arrowTypes.types as type> + <#assign name = type.name> + ${name?remove_ending("_")}(Type.${name}), + + NONE(Type.NONE); + + private final byte flatbufType; + + public byte getFlatbufID() { + return this.flatbufType; + } + + private ArrowTypeID(byte flatbufType) { + this.flatbufType = flatbufType; + } + } + + @JsonIgnore + public abstract ArrowTypeID getTypeID(); + @JsonIgnore + public abstract boolean isComplex(); + public abstract int 
getType(FlatBufferBuilder builder); + public abstract T accept(ArrowTypeVisitor visitor); + + /** + * to visit the ArrowTypes + * + * type.accept(new ArrowTypeVisitor<Type>() { + * ... + * }); + * + */ + public static interface ArrowTypeVisitor { + <#list arrowTypes.types as type> + T visit(${type.name?remove_ending("_")} type); + + default T visit(ExtensionType type) { + return type.storageType().accept(this); + } + } + + /** + * to visit the Complex ArrowTypes and bundle Primitive ones in one case + */ + public static abstract class ComplexTypeVisitor implements ArrowTypeVisitor { + + public T visit(PrimitiveType type) { + throw new UnsupportedOperationException("Unexpected Primitive type: " + type); + } + + <#list arrowTypes.types as type> + <#if !type.complex> + public final T visit(${type.name?remove_ending("_")} type) { + return visit((PrimitiveType) type); + } + + + } + + /** + * to visit the Primitive ArrowTypes and bundle Complex ones under one case + */ + public static abstract class PrimitiveTypeVisitor implements ArrowTypeVisitor { + + public T visit(ComplexType type) { + throw new UnsupportedOperationException("Unexpected Complex type: " + type); + } + + <#list arrowTypes.types as type> + <#if type.complex> + public final T visit(${type.name?remove_ending("_")} type) { + return visit((ComplexType) type); + } + + + } + + <#list arrowTypes.types as type> + <#assign name = type.name?remove_ending("_")> + <#assign fields = type.fields> + public static class ${name} extends <#if type.complex>ComplexType<#else>PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.${name}; + <#if type.fields?size == 0> + public static final ${name} INSTANCE = new ${name}(); + <#else> + + <#list fields as field> + <#assign fieldType = field.valueType!field.type> + ${fieldType} ${field.name}; + + + + <#if type.name == "Decimal"> + // Needed to support golden file integration tests. 
+ @JsonCreator + public static Decimal createDecimal( + @JsonProperty("precision") int precision, + @JsonProperty("scale") int scale, + @JsonProperty("bitWidth") Integer bitWidth) { + + return new Decimal(precision, scale, bitWidth == null ? 128 : bitWidth); + } + + /** + * Construct Decimal with 128 bits. + * + * This is kept mainly for the sake of backward compatibility. + * Please use {@link org.apache.arrow.vector.types.pojo.ArrowType.Decimal#Decimal(int, int, int)} instead. + * + * @deprecated This API will be removed in a future release. + */ + @Deprecated + public Decimal(int precision, int scale) { + this(precision, scale, 128); + } + + <#else> + @JsonCreator + + public ${type.name}( + <#list type.fields as field> + <#assign fieldType = field.valueType!field.type> + @JsonProperty("${field.name}") ${fieldType} ${field.name}<#if field_has_next>, + + ) { + <#list type.fields as field> + this.${field.name} = ${field.name}; + + } + + <#list fields as field> + <#assign fieldType = field.valueType!field.type> + public ${fieldType} get${field.name?cap_first}() { + return ${field.name}; + } + + + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + <#list type.fields as field> + <#if field.type == "String"> + int ${field.name} = this.${field.name} == null ? -1 : builder.createString(this.${field.name}); + + <#if field.type == "int[]"> + int ${field.name} = this.${field.name} == null ? 
-1 : org.apache.arrow.flatbuf.${type.name}.create${field.name?cap_first}Vector(builder, this.${field.name}); + + + org.apache.arrow.flatbuf.${type.name}.start${type.name}(builder); + <#list type.fields as field> + <#if field.type == "String" || field.type == "int[]"> + if (this.${field.name} != null) { + org.apache.arrow.flatbuf.${type.name}.add${field.name?cap_first}(builder, ${field.name}); + } + <#else> + org.apache.arrow.flatbuf.${type.name}.add${field.name?cap_first}(builder, this.${field.name}<#if field.valueType??>.getFlatbufID()); + + + return org.apache.arrow.flatbuf.${type.name}.end${type.name}(builder); + } + + public String toString() { + return "${name}" + <#if fields?size != 0> + + "(" + <#list fields as field> + + <#if field.type == "int[]">java.util.Arrays.toString(${field.name})<#else>${field.name}<#if field_has_next> + ", " + + + ")" + + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {<#list type.fields as field>${field.name}<#if field_has_next>, }); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof ${name})) { + return false; + } + <#if type.fields?size == 0> + return true; + <#else> + ${type.name} that = (${type.name}) obj; + return <#list type.fields as field>Objects.deepEquals(this.${field.name}, that.${field.name}) <#if field_has_next>&&<#else>; + + + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + + + /** + * A user-defined data type that wraps an underlying storage type. + */ + public abstract static class ExtensionType extends ComplexType { + /** The on-wire type for this user-defined type. */ + public abstract ArrowType storageType(); + /** The name of this user-defined type. Used to identify the type during serialization. */ + public abstract String extensionName(); + /** Check equality of this type to another user-defined type. 
*/ + public abstract boolean extensionEquals(ExtensionType other); + /** Save any metadata for this type. */ + public abstract String serialize(); + /** Given saved metadata and the underlying storage type, construct a new instance of the user type. */ + public abstract ArrowType deserialize(ArrowType storageType, String serializedData); + /** Construct a vector for the user type. */ + public abstract FieldVector getNewVector(String name, FieldType fieldType, BufferAllocator allocator); + + /** The field metadata key storing the name of the extension type. */ + public static final String EXTENSION_METADATA_KEY_NAME = "ARROW:extension:name"; + /** The field metadata key storing metadata for the extension type. */ + public static final String EXTENSION_METADATA_KEY_METADATA = "ARROW:extension:metadata"; + + @Override + public ArrowTypeID getTypeID() { + return storageType().getTypeID(); + } + + @Override + public int getType(FlatBufferBuilder builder) { + return storageType().getType(builder); + } + + public String toString() { + return "ExtensionType(" + extensionName() + ", " + storageType().toString() + ")"; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {storageType(), extensionName()}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof ExtensionType)) { + return false; + } + return this.extensionEquals((ExtensionType) obj); + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + + private static final int defaultDecimalBitWidth = 128; + + public static org.apache.arrow.vector.types.pojo.ArrowType getTypeForField(org.apache.arrow.flatbuf.Field field) { + switch(field.typeType()) { + <#list arrowTypes.types as type> + <#assign name = type.name?remove_ending("_")> + <#assign nameLower = type.name?lower_case> + <#assign fields = type.fields> + case Type.${type.name}: { + org.apache.arrow.flatbuf.${type.name} ${nameLower}Type = 
(org.apache.arrow.flatbuf.${type.name}) field.type(new org.apache.arrow.flatbuf.${type.name}()); + <#list type.fields as field> + <#if field.type == "int[]"> + ${field.type} ${field.name} = new int[${nameLower}Type.${field.name}Length()]; + for (int i = 0; i< ${field.name}.length; ++i) { + ${field.name}[i] = ${nameLower}Type.${field.name}(i); + } + <#else> + ${field.type} ${field.name} = ${nameLower}Type.${field.name}(); + + + <#if type.name == "Decimal"> + if (bitWidth != defaultDecimalBitWidth && bitWidth != 256) { + throw new IllegalArgumentException("Library only supports 128-bit and 256-bit decimal values"); + } + + return new ${name}(<#list type.fields as field><#if field.valueType??>${field.valueType}.fromFlatbufID(${field.name})<#else>${field.name}<#if field_has_next>, ); + } + + default: + throw new UnsupportedOperationException("Unsupported type: " + field.typeType()); + } + } + + public static Int getInt(org.apache.arrow.flatbuf.Field field) { + org.apache.arrow.flatbuf.Int intType = (org.apache.arrow.flatbuf.Int) field.type(new org.apache.arrow.flatbuf.Int()); + return new Int(intType.bitWidth(), intType.isSigned()); + } +} + + diff --git a/java/vector/target/classes/codegen/templates/BaseReader.java b/java/vector/target/classes/codegen/templates/BaseReader.java new file mode 100644 index 000000000000..85d582a53bf5 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/BaseReader.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/reader/BaseReader.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.reader; + +<#include "/@includes/vv_imports.ftl" /> + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public interface BaseReader extends Positionable{ + Field getField(); + MinorType getMinorType(); + void reset(); + void read(UnionHolder holder); + void read(int index, UnionHolder holder); + void copyAsValue(UnionWriter writer); + void read(DenseUnionHolder holder); + void read(int index, DenseUnionHolder holder); + void copyAsValue(DenseUnionWriter writer); + boolean isSet(); + + public interface StructReader extends BaseReader, Iterable{ + FieldReader reader(String name); + } + + public interface RepeatedStructReader extends StructReader{ + boolean next(); + int size(); + void copyAsValue(StructWriter writer); + } + + public interface ListReader extends BaseReader{ + FieldReader reader(); + } + + public interface RepeatedListReader extends ListReader{ + boolean next(); + int size(); + void copyAsValue(ListWriter writer); + } + + public interface MapReader extends BaseReader{ + FieldReader reader(); + } + + public interface RepeatedMapReader extends MapReader{ + boolean next(); + int size(); + void copyAsValue(MapWriter writer); + } + + public interface ScalarReader extends + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> ${name}Reader, + 
BaseReader {} + + interface ComplexReader{ + StructReader rootAsStruct(); + ListReader rootAsList(); + boolean rootIsStruct(); + boolean ok(); + } +} + diff --git a/java/vector/target/classes/codegen/templates/BaseWriter.java b/java/vector/target/classes/codegen/templates/BaseWriter.java new file mode 100644 index 000000000000..35df256b324b --- /dev/null +++ b/java/vector/target/classes/codegen/templates/BaseWriter.java @@ -0,0 +1,136 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/writer/BaseWriter.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.writer; + +<#include "/@includes/vv_imports.ftl" /> + +/* + * File generated from ${.template_name} using FreeMarker. + */ +@SuppressWarnings("unused") +public interface BaseWriter extends AutoCloseable, Positionable { + int getValueCapacity(); + void writeNull(); + + public interface StructWriter extends BaseWriter { + + Field getField(); + + /** + * Whether this writer is a struct writer and is empty (has no children). + * + *

+ * Intended only for use in determining whether to add a dummy vector to + * avoid an empty (zero-column) schema, as in JsonReader. + *

+ * @return whether the struct is empty + */ + boolean isEmptyStruct(); + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#if minor.typeParams?? > + ${capName}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}); + + ${capName}Writer ${lowerName}(String name); + + + void copyReaderToField(String name, FieldReader reader); + StructWriter struct(String name); + ListWriter list(String name); + MapWriter map(String name); + MapWriter map(String name, boolean keysSorted); + void start(); + void end(); + } + + public interface ListWriter extends BaseWriter { + void startList(); + void endList(); + StructWriter struct(); + ListWriter list(); + MapWriter map(); + MapWriter map(boolean keysSorted); + void copyReader(FieldReader reader); + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + ${capName}Writer ${lowerName}(); + + } + + public interface MapWriter extends ListWriter { + void startMap(); + void endMap(); + + void startEntry(); + void endEntry(); + + MapWriter key(); + MapWriter value(); + } + + public interface ScalarWriter extends + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> ${name}Writer, BaseWriter {} + + public interface ComplexWriter { + void allocate(); + void clear(); + void copyReader(FieldReader reader); + StructWriter rootAsStruct(); + ListWriter rootAsList(); + MapWriter rootAsMap(boolean keysSorted); + + void setPosition(int index); + void setValueCount(int count); + void reset(); + } + + public interface StructOrListWriter { + void 
start(); + void end(); + StructOrListWriter struct(String name); + /** + * @deprecated use {@link #listOfStruct()} instead. + */ + StructOrListWriter listoftstruct(String name); + StructOrListWriter listOfStruct(String name); + StructOrListWriter list(String name); + boolean isStructWriter(); + boolean isListWriter(); + VarCharWriter varChar(String name); + IntWriter integer(String name); + BigIntWriter bigInt(String name); + Float4Writer float4(String name); + Float8Writer float8(String name); + BitWriter bit(String name); + VarBinaryWriter binary(String name); + } +} diff --git a/java/vector/target/classes/codegen/templates/CaseSensitiveStructWriters.java b/java/vector/target/classes/codegen/templates/CaseSensitiveStructWriters.java new file mode 100644 index 000000000000..cc0dd7b335c5 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/CaseSensitiveStructWriters.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +<@pp.dropOutputFile /> +<#list ["Nullable", "Single"] as mode> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/${mode}CaseSensitiveStructWriter.java" /> +<#assign index = "idx()"> +<#if mode == "Single"> +<#assign containerClass = "NonNullableStructVector" /> +<#else> +<#assign containerClass = "StructVector" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> +/* + * This class is generated using FreeMarker and the ${.template_name} template. + */ +@SuppressWarnings("unused") +public class ${mode}CaseSensitiveStructWriter extends ${mode}StructWriter { + public ${mode}CaseSensitiveStructWriter(${containerClass} container) { + super(container); + } + + @Override + protected String handleCase(final String input){ + return input; + } + + @Override + protected NullableStructWriterFactory getNullableStructWriterFactory() { + return NullableStructWriterFactory.getNullableCaseSensitiveStructWriterFactoryInstance(); + } + +} + diff --git a/java/vector/target/classes/codegen/templates/ComplexCopier.java b/java/vector/target/classes/codegen/templates/ComplexCopier.java new file mode 100644 index 000000000000..1a3ba940e797 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/ComplexCopier.java @@ -0,0 +1,210 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.vector.complex.impl.UnionMapReader; +import org.apache.arrow.vector.complex.reader.FieldReader; +import org.apache.arrow.vector.complex.writer.FieldWriter; +import org.apache.arrow.vector.types.Types; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/ComplexCopier.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ +@SuppressWarnings("unused") +public class ComplexCopier { + + /** + * Do a deep copy of the value in input into output + * @param input field to read from + * @param output field to write to + */ + public static void copy(FieldReader input, FieldWriter output) { + writeValue(input, output); + } + + private static void writeValue(FieldReader reader, FieldWriter writer) { + final MinorType mt = reader.getMinorType(); + + switch (mt) { + + case LIST: + case LARGELIST: + case FIXED_SIZE_LIST: + if (reader.isSet()) { + writer.startList(); + while (reader.next()) { + FieldReader childReader = reader.reader(); + FieldWriter childWriter = getListWriterForReader(childReader, writer); + if (childReader.isSet()) { + writeValue(childReader, childWriter); + } else { + childWriter.writeNull(); + } + } + writer.endList(); + } else { + writer.writeNull(); + } + break; + case MAP: + if (reader.isSet()) { + UnionMapReader mapReader = (UnionMapReader) reader; + writer.startMap(); + while 
(mapReader.next()) { + FieldReader structReader = reader.reader(); + if (structReader.isSet()) { + writer.startEntry(); + writeValue(mapReader.key(), getMapWriterForReader(mapReader.key(), writer.key())); + writeValue(mapReader.value(), getMapWriterForReader(mapReader.value(), writer.value())); + writer.endEntry(); + } else { + writer.writeNull(); + } + } + writer.endMap(); + } else { + writer.writeNull(); + } + break; + case STRUCT: + if (reader.isSet()) { + writer.start(); + for(String name : reader){ + FieldReader childReader = reader.reader(name); + if (childReader.getMinorType() != Types.MinorType.NULL) { + FieldWriter childWriter = getStructWriterForReader(childReader, writer, name); + if (childReader.isSet()) { + writeValue(childReader, childWriter); + } else { + childWriter.writeNull(); + } + } + } + writer.end(); + } else { + writer.writeNull(); + } + break; + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") > + + case ${name?upper_case}: + if (reader.isSet()) { + Nullable${name}Holder ${uncappedName}Holder = new Nullable${name}Holder(); + reader.read(${uncappedName}Holder); + if (${uncappedName}Holder.isSet == 1) { + writer.write${name}(<#list fields as field>${uncappedName}Holder.${field.name}<#if field_has_next>, <#if minor.class?starts_with("Decimal")>, new ArrowType.Decimal(${uncappedName}Holder.precision, ${uncappedName}Holder.scale, ${name}Holder.WIDTH * 8)); + } + } else { + writer.writeNull(); + } + break; + + + + } + } + + private static FieldWriter getStructWriterForReader(FieldReader reader, StructWriter writer, String name) { + switch (reader.getMinorType()) { + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams??> + case ${name?upper_case}: + return (FieldWriter) writer.<#if name == "Int">integer<#else>${uncappedName}(name); + + <#if minor.class?starts_with("Decimal")> + case ${name?upper_case}: + if (reader.getField().getType() instanceof ArrowType.Decimal) { + ArrowType.Decimal type = (ArrowType.Decimal) reader.getField().getType(); + return (FieldWriter) writer.${uncappedName}(name, type.getScale(), type.getPrecision()); + } else { + return (FieldWriter) writer.${uncappedName}(name); + } + + + + case STRUCT: + return (FieldWriter) writer.struct(name); + case FIXED_SIZE_LIST: + case LIST: + return (FieldWriter) writer.list(name); + case MAP: + return (FieldWriter) writer.map(name); + default: + throw new UnsupportedOperationException(reader.getMinorType().toString()); + } + } + + private static FieldWriter getListWriterForReader(FieldReader reader, ListWriter writer) { + switch (reader.getMinorType()) { + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign 
uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") > + case ${name?upper_case}: + return (FieldWriter) writer.<#if name == "Int">integer<#else>${uncappedName}(); + + + case STRUCT: + return (FieldWriter) writer.struct(); + case FIXED_SIZE_LIST: + case LIST: + case MAP: + case NULL: + return (FieldWriter) writer.list(); + default: + throw new UnsupportedOperationException(reader.getMinorType().toString()); + } + } + + private static FieldWriter getMapWriterForReader(FieldReader reader, MapWriter writer) { + switch (reader.getMinorType()) { + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") > + case ${name?upper_case}: + return (FieldWriter) writer.<#if name == "Int">integer<#else>${uncappedName}(); + + + case STRUCT: + return (FieldWriter) writer.struct(); + case FIXED_SIZE_LIST: + case LIST: + case NULL: + return (FieldWriter) writer.list(); + case MAP: + return (FieldWriter) writer.map(false); + default: + throw new UnsupportedOperationException(reader.getMinorType().toString()); + } + } +} diff --git a/java/vector/target/classes/codegen/templates/ComplexReaders.java b/java/vector/target/classes/codegen/templates/ComplexReaders.java new file mode 100644 index 000000000000..48fb6603ad5e --- /dev/null +++ b/java/vector/target/classes/codegen/templates/ComplexReaders.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.lang.Override; +import java.util.List; + +import org.apache.arrow.record.TransferPair; +import org.apache.arrow.vector.complex.IndexHolder; +import org.apache.arrow.vector.complex.writer.IntervalWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; + +<@pp.dropOutputFile /> +<#list vv.types as type> +<#list type.minor as minor> +<#list [""] as mode> +<#assign lowerName = minor.class?uncap_first /> +<#if lowerName == "int" ><#assign lowerName = "integer" /> +<#assign name = minor.class?cap_first /> +<#assign javaType = (minor.javaType!type.javaType) /> +<#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> +<#assign safeType=friendlyType /> +<#if safeType=="byte[]"><#assign safeType="ByteArray" /> + +<#assign hasFriendly = minor.friendlyType!"no" == "no" /> + +<#list ["Nullable"] as nullMode> +<#if mode == "" > +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/${name}ReaderImpl.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public class ${name}ReaderImpl extends AbstractFieldReader { + + private final ${name}Vector vector; + + public ${name}ReaderImpl(${name}Vector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean 
isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(${minor.class?cap_first}Writer writer){ + ${minor.class?cap_first}WriterImpl impl = (${minor.class?cap_first}WriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + ${minor.class?cap_first}WriterImpl impl = (${minor.class?cap_first}WriterImpl) writer.${lowerName}(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + <#if nullMode != "Nullable"> + public void read(${minor.class?cap_first}Holder h){ + vector.get(idx(), h); + } + + + public void read(Nullable${minor.class?cap_first}Holder h){ + vector.get(idx(), h); + } + + public ${friendlyType} read${safeType}(){ + return vector.getObject(idx()); + } + + <#if minor.class == "TimeStampSec" || + minor.class == "TimeStampMilli" || + minor.class == "TimeStampMicro" || + minor.class == "TimeStampNano"> + @Override + public ${minor.boxedType} read${minor.boxedType}(){ + return vector.get(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} + + +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/reader/${name}Reader.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.reader; + +<#include "/@includes/vv_imports.ftl" /> +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public interface ${name}Reader extends BaseReader{ + + public void read(${minor.class?cap_first}Holder h); + public void read(Nullable${minor.class?cap_first}Holder h); + public Object readObject(); + // read friendly type + public ${friendlyType} read${safeType}(); + public boolean isSet(); + public void copyAsValue(${minor.class}Writer writer); + public void copyAsField(String name, ${minor.class}Writer writer); + +} + + + + + + + + diff --git 
a/java/vector/target/classes/codegen/templates/ComplexWriters.java b/java/vector/target/classes/codegen/templates/ComplexWriters.java new file mode 100644 index 000000000000..2e3caae1f0f2 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/ComplexWriters.java @@ -0,0 +1,280 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +<@pp.dropOutputFile /> +<#list vv.types as type> +<#list type.minor as minor> +<#list ["Nullable"] as mode> +<#assign name = minor.class?cap_first /> +<#assign eName = name /> +<#assign javaType = (minor.javaType!type.javaType) /> +<#assign fields = minor.fields!type.fields /> +<#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> + +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/${eName}WriterImpl.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/* + * This class is generated using FreeMarker on the ${.template_name} template. 
+ */ +@SuppressWarnings("unused") +public class ${eName}WriterImpl extends AbstractFieldWriter { + + final ${name}Vector vector; + +<#if minor.class?ends_with("VarChar")> + private final Text textBuffer = new Text(); + + +public ${eName}WriterImpl(${name}Vector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + <#if mode == "Repeated"> + + public void write(${minor.class?cap_first}Holder h) { + mutator.addSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(${minor.class?cap_first}Holder h) { + mutator.addSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + mutator.addSafe(idx(), <#list fields as field>${field.name}<#if field_has_next>, ); + vector.setValueCount(idx()+1); + } + + public void setPosition(int idx) { + super.setPosition(idx); + mutator.startNewValue(idx); + } + + + <#else> + + <#if !minor.class?starts_with("Decimal")> + public void write(${minor.class}Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(Nullable${minor.class}Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + vector.setSafe(idx(), 1<#list fields as field><#if field.include!true >, ${field.name}); + vector.setValueCount(idx()+1); + } + + + <#if minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(${friendlyType} value) { + vector.setSafe(idx(), 
value); + vector.setValueCount(idx()+1); + } + + @Override + public void write${minor.class}(String value) { + textBuffer.set(value); + vector.setSafe(idx(), textBuffer); + vector.setValueCount(idx()+1); + } + + + <#if minor.class?starts_with("Decimal")> + + public void write(${minor.class}Holder h){ + DecimalUtility.checkPrecisionAndScale(h.precision, h.scale, vector.getPrecision(), vector.getScale()); + vector.setSafe(idx(), h); + vector.setValueCount(idx() + 1); + } + + public void write(Nullable${minor.class}Holder h){ + if (h.isSet == 1) { + DecimalUtility.checkPrecisionAndScale(h.precision, h.scale, vector.getPrecision(), vector.getScale()); + } + vector.setSafe(idx(), h); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(long start, ArrowBuf buffer){ + vector.setSafe(idx(), 1, start, buffer); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(long start, ArrowBuf buffer, ArrowType arrowType){ + DecimalUtility.checkPrecisionAndScale(((ArrowType.Decimal) arrowType).getPrecision(), + ((ArrowType.Decimal) arrowType).getScale(), vector.getPrecision(), vector.getScale()); + vector.setSafe(idx(), 1, start, buffer); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(BigDecimal value){ + // vector.setSafe already does precision and scale checking + vector.setSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void writeBigEndianBytesTo${minor.class}(byte[] value, ArrowType arrowType){ + DecimalUtility.checkPrecisionAndScale(((ArrowType.Decimal) arrowType).getPrecision(), + ((ArrowType.Decimal) arrowType).getScale(), vector.getPrecision(), vector.getScale()); + vector.setBigEndianSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void writeBigEndianBytesTo${minor.class}(byte[] value){ + vector.setBigEndianSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + 
+ + <#if minor.class?ends_with("VarBinary")> + public void write${minor.class}(byte[] value) { + vector.setSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(byte[] value, int offset, int length) { + vector.setSafe(idx(), value, offset, length); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(ByteBuffer value) { + vector.setSafe(idx(), value, 0, value.remaining()); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(ByteBuffer value, int offset, int length) { + vector.setSafe(idx(), value, offset, length); + vector.setValueCount(idx() + 1); + } + +} + +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/writer/${eName}Writer.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.writer; + +<#include "/@includes/vv_imports.ftl" /> +/* + * This class is generated using FreeMarker on the ${.template_name} template. + */ +@SuppressWarnings("unused") +public interface ${eName}Writer extends BaseWriter { + public void write(${minor.class}Holder h); + +<#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + /** + * @deprecated + * The holder version should be used instead because the plain value version does not contain enough information + * to fully specify this field type. 
+ * @see #write(${minor.class}Holder) + */ + @Deprecated + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ); +<#if minor.class?starts_with("Decimal")> + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, , ArrowType arrowType); + + public void write${minor.class}(${friendlyType} value); + + public void writeBigEndianBytesTo${minor.class}(byte[] value, ArrowType arrowType); + + /** + * @deprecated + * Use either the version that additionally takes in an ArrowType or use the holder version. + * This version does not contain enough information to fully specify this field type. + * @see #writeBigEndianBytesTo${minor.class}(byte[], ArrowType) + * @see #write(${minor.class}Holder) + */ + @Deprecated + public void writeBigEndianBytesTo${minor.class}(byte[] value); + + +<#if minor.class?ends_with("VarBinary")> + public void write${minor.class}(byte[] value); + + public void write${minor.class}(byte[] value, int offset, int length); + + public void write${minor.class}(ByteBuffer value); + + public void write${minor.class}(ByteBuffer value, int offset, int length); + + +<#if minor.class?ends_with("VarChar")> + public void write${minor.class}(${friendlyType} value); + + public void write${minor.class}(String value); + +} + + + + diff --git a/java/vector/target/classes/codegen/templates/DenseUnionReader.java b/java/vector/target/classes/codegen/templates/DenseUnionReader.java new file mode 100644 index 000000000000..a085e03ea64e --- /dev/null +++ b/java/vector/target/classes/codegen/templates/DenseUnionReader.java @@ -0,0 +1,229 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +import org.apache.arrow.vector.complex.impl.UnionListReader; +import org.apache.arrow.vector.types.Types.MinorType; +import org.apache.arrow.vector.types.pojo.Field; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/DenseUnionReader.java" /> + + +<#include "/@includes/license.ftl" /> + + package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public class DenseUnionReader extends AbstractFieldReader { + + private BaseReader[] readers = new BaseReader[Byte.MAX_VALUE + 1]; + public DenseUnionVector data; + + public DenseUnionReader(DenseUnionVector data) { + this.data = data; + } + + public MinorType getMinorType() { + byte typeId = data.getTypeId(idx()); + return data.getVectorByType(typeId).getMinorType(); + } + + public byte getTypeId() { + return data.getTypeId(idx()); + } + + @Override + public Field getField() { + return data.getField(); + } + + public boolean isSet(){ + return !data.isNull(idx()); + } + + public void read(DenseUnionHolder holder) { + holder.reader = this; + holder.isSet = this.isSet() ? 
1 : 0; + holder.typeId = getTypeId(); + } + + public void read(int index, UnionHolder holder) { + byte typeId = data.getTypeId(index); + getList(typeId).read(index, holder); + } + + private FieldReader getReaderForIndex(int index) { + byte typeId = data.getTypeId(index); + MinorType minorType = data.getVectorByType(typeId).getMinorType(); + FieldReader reader = (FieldReader) readers[typeId]; + if (reader != null) { + return reader; + } + switch (minorType) { + case NULL: + reader = NullReader.INSTANCE; + break; + case STRUCT: + reader = (FieldReader) getStruct(typeId); + break; + case LIST: + reader = (FieldReader) getList(typeId); + break; + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal")> + case ${name?upper_case}: + reader = (FieldReader) get${name}(typeId); + break; + + + + default: + throw new UnsupportedOperationException("Unsupported type: " + MinorType.values()[typeId]); + } + return reader; + } + + private SingleStructReaderImpl structReader; + + private StructReader getStruct(byte typeId) { + StructReader structReader = (StructReader) readers[typeId]; + if (structReader == null) { + structReader = (SingleStructReaderImpl) data.getVectorByType(typeId).getReader(); + structReader.setPosition(idx()); + readers[typeId] = structReader; + } + return structReader; + } + + private UnionListReader listReader; + + private FieldReader getList(byte typeId) { + UnionListReader listReader = (UnionListReader) readers[typeId]; + if (listReader == null) { + listReader = new UnionListReader((ListVector) data.getVectorByType(typeId)); + listReader.setPosition(idx()); + readers[typeId] = listReader; + } + return listReader; + } + + private UnionMapReader mapReader; + + private FieldReader getMap(byte typeId) { + UnionMapReader mapReader = (UnionMapReader) readers[typeId]; + if (mapReader == null) { + mapReader = new 
UnionMapReader((MapVector) data.getVectorByType(typeId)); + mapReader.setPosition(idx()); + readers[typeId] = mapReader; + } + return mapReader; + } + + @Override + public java.util.Iterator iterator() { + throw new UnsupportedOperationException(); + } + + @Override + public void copyAsValue(UnionWriter writer) { + writer.data.copyFrom(idx(), writer.idx(), data); + } + + <#list ["Object", "BigDecimal", "Short", "Integer", "Long", "Boolean", + "LocalDateTime", "Duration", "Period", "Double", "Float", + "Character", "Text", "Byte", "byte[]", "PeriodDuration"] as friendlyType> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + + @Override + public ${friendlyType} read${safeType}() { + return getReaderForIndex(idx()).read${safeType}(); + } + + + + public int size() { + return getReaderForIndex(idx()).size(); + } + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign uncappedName = name?uncap_first/> + <#assign boxedType = (minor.boxedType!type.boxedType) /> + <#assign javaType = (minor.javaType!type.javaType) /> + <#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal")> + + private ${name}ReaderImpl get${name}(byte typeId) { + ${name}ReaderImpl reader = (${name}ReaderImpl) readers[typeId]; + if (reader == null) { + reader = new ${name}ReaderImpl((${name}Vector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(Nullable${name}Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(${name}Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + + + + @Override + public void copyAsValue(ListWriter writer) { + ComplexCopier.copy(this, (FieldWriter) writer); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + byte typeId = data.getTypeId(index); + if (readers[typeId] != null) { + int offset = data.getOffset(index); + readers[typeId].setPosition(offset); + } + } + + public FieldReader reader(byte typeId, String name){ + return getStruct(typeId).reader(name); + } + + public FieldReader reader(byte typeId) { + return getList(typeId).reader(); + } + + public boolean next() { + return getReaderForIndex(idx()).next(); + } +} diff --git a/java/vector/target/classes/codegen/templates/DenseUnionVector.java b/java/vector/target/classes/codegen/templates/DenseUnionVector.java new file mode 100644 index 000000000000..42e96f7aca33 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/DenseUnionVector.java @@ -0,0 +1,997 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.ReferenceManager; +import org.apache.arrow.memory.util.CommonUtil; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.BaseValueVector; +import org.apache.arrow.vector.BitVectorHelper; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.ValueVector; +import org.apache.arrow.vector.complex.AbstractStructVector; +import org.apache.arrow.vector.complex.ListVector; +import org.apache.arrow.vector.complex.NonNullableStructVector; +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.compare.VectorVisitor; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.UnionMode; +import org.apache.arrow.vector.compare.RangeEqualsVisitor; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.util.CallBack; +import org.apache.arrow.vector.util.DataSizeRoundingUtil; +import org.apache.arrow.vector.util.TransferPair; + +import java.util.Arrays; +import java.util.stream.Collectors; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/DenseUnionVector.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex; + +<#include "/@includes/vv_imports.ftl" /> +import java.util.ArrayList; +import java.util.Collections; +import 
java.util.Iterator; +import org.apache.arrow.memory.util.CommonUtil; +import org.apache.arrow.memory.util.hash.ArrowBufHasher; +import org.apache.arrow.memory.util.hash.SimpleHasher; +import org.apache.arrow.vector.compare.VectorVisitor; +import org.apache.arrow.vector.complex.impl.ComplexCopier; +import org.apache.arrow.vector.util.CallBack; +import org.apache.arrow.vector.ipc.message.ArrowFieldNode; +import org.apache.arrow.vector.BaseValueVector; +import org.apache.arrow.vector.util.OversizedAllocationException; +import org.apache.arrow.util.Preconditions; + +import static org.apache.arrow.vector.types.UnionMode.Dense; + + + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ +@SuppressWarnings("unused") + + +/** + * A vector which can hold values of different types. It does so by using a StructVector which contains a vector for each + * primitive type that is stored. StructVector is used in order to take advantage of its serialization/deserialization methods, + * as well as the addOrGet method. + * + * For performance reasons, DenseUnionVector stores a cached reference to each subtype vector, to avoid having to do the struct lookup + * each time the vector is accessed. + * Source code generated using FreeMarker template ${.template_name} + */ +public class DenseUnionVector extends AbstractContainerVector implements FieldVector { + int valueCount; + + NonNullableStructVector internalStruct; + private ArrowBuf typeBuffer; + private ArrowBuf offsetBuffer; + + /** + * The key is type Id, and the value is vector. + */ + private ValueVector[] childVectors = new ValueVector[Byte.MAX_VALUE + 1]; + + /** + * The index is the type id, and the value is the type field. + */ + private Field[] typeFields = new Field[Byte.MAX_VALUE + 1]; + /** + * The index is the index into the typeFields array, and the value is the logical field id. 
+ */ + private byte[] typeMapFields = new byte[Byte.MAX_VALUE + 1]; + + /** + * The next type id to allocate. + */ + private byte nextTypeId = 0; + + private FieldReader reader; + + private long typeBufferAllocationSizeInBytes; + private long offsetBufferAllocationSizeInBytes; + + private final FieldType fieldType; + + public static final byte TYPE_WIDTH = 1; + public static final byte OFFSET_WIDTH = 4; + + private static final FieldType INTERNAL_STRUCT_TYPE = new FieldType(/*nullable*/ false, + ArrowType.Struct.INSTANCE, /*dictionary*/ null, /*metadata*/ null); + + public static DenseUnionVector empty(String name, BufferAllocator allocator) { + FieldType fieldType = FieldType.notNullable(new ArrowType.Union( + UnionMode.Dense, null)); + return new DenseUnionVector(name, allocator, fieldType, null); + } + + public DenseUnionVector(String name, BufferAllocator allocator, FieldType fieldType, CallBack callBack) { + super(name, allocator, callBack); + this.fieldType = fieldType; + this.internalStruct = new NonNullableStructVector( + "internal", + allocator, + INTERNAL_STRUCT_TYPE, + callBack, + AbstractStructVector.ConflictPolicy.CONFLICT_REPLACE, + false); + this.typeBuffer = allocator.getEmpty(); + this.typeBufferAllocationSizeInBytes = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH; + this.offsetBuffer = allocator.getEmpty(); + this.offsetBufferAllocationSizeInBytes = BaseValueVector.INITIAL_VALUE_ALLOCATION * OFFSET_WIDTH; + } + + public BufferAllocator getAllocator() { + return allocator; + } + + @Override + public MinorType getMinorType() { + return MinorType.DENSEUNION; + } + + @Override + public void initializeChildrenFromFields(List children) { + for (Field field : children) { + byte typeId = registerNewTypeId(field); + FieldVector vector = (FieldVector) internalStruct.add(field.getName(), field.getFieldType()); + vector.initializeChildrenFromFields(field.getChildren()); + childVectors[typeId] = vector; + } + } + + @Override + public List 
getChildrenFromFields() { + return internalStruct.getChildrenFromFields(); + } + + @Override + public void loadFieldBuffers(ArrowFieldNode fieldNode, List ownBuffers) { + if (ownBuffers.size() != 2) { + throw new IllegalArgumentException("Illegal buffer count for dense union with type " + getField().getFieldType() + + ", expected " + 2 + ", got: " + ownBuffers.size()); + } + + ArrowBuf buffer = ownBuffers.get(0); + typeBuffer.getReferenceManager().release(); + typeBuffer = buffer.getReferenceManager().retain(buffer, allocator); + typeBufferAllocationSizeInBytes = typeBuffer.capacity(); + + buffer = ownBuffers.get(1); + offsetBuffer.getReferenceManager().release(); + offsetBuffer = buffer.getReferenceManager().retain(buffer, allocator); + offsetBufferAllocationSizeInBytes = offsetBuffer.capacity(); + + this.valueCount = fieldNode.getLength(); + } + + @Override + public List getFieldBuffers() { + List result = new ArrayList<>(2); + setReaderAndWriterIndex(); + result.add(typeBuffer); + result.add(offsetBuffer); + + return result; + } + + private void setReaderAndWriterIndex() { + typeBuffer.readerIndex(0); + typeBuffer.writerIndex(valueCount * TYPE_WIDTH); + + offsetBuffer.readerIndex(0); + offsetBuffer.writerIndex((long) valueCount * OFFSET_WIDTH); + } + + /** + * Get the inner vectors. + * + * @deprecated This API will be removed as the current implementations no longer support inner vectors. + * + * @return the inner vectors for this field as defined by the TypeLayout + */ + @Override + @Deprecated + public List getFieldInnerVectors() { + throw new UnsupportedOperationException("There are no inner vectors. 
Use geFieldBuffers"); + } + + private String fieldName(byte typeId, MinorType type) { + return type.name().toLowerCase() + typeId; + } + + private FieldType fieldType(MinorType type) { + return FieldType.nullable(type.getType()); + } + + public synchronized byte registerNewTypeId(Field field) { + if (nextTypeId == typeFields.length) { + throw new IllegalStateException("Dense union vector support at most " + + typeFields.length + " relative types. Please use union of union instead"); + } + byte typeId = nextTypeId; + if (this.fieldType != null) { + int[] typeIds = ((ArrowType.Union) this.fieldType.getType()).getTypeIds(); + if (typeIds != null) { + int thisTypeId = typeIds[nextTypeId]; + if (thisTypeId > Byte.MAX_VALUE) { + throw new IllegalStateException("Dense union vector types must be bytes. " + thisTypeId + " is too large"); + } + typeId = (byte) thisTypeId; + } + } + typeFields[typeId] = field; + typeMapFields[nextTypeId] = typeId; + this.nextTypeId += 1; + return typeId; + } + + private T addOrGet(byte typeId, MinorType minorType, Class c) { + return internalStruct.addOrGet(fieldName(typeId, minorType), fieldType(minorType), c); + } + + private T addOrGet(byte typeId, MinorType minorType, ArrowType arrowType, Class c) { + return internalStruct.addOrGet(fieldName(typeId, minorType), FieldType.nullable(arrowType), c); + } + + @Override + public long getOffsetBufferAddress() { + return offsetBuffer.memoryAddress(); + } + + @Override + public long getDataBufferAddress() { + throw new UnsupportedOperationException(); + } + + @Override + public long getValidityBufferAddress() { + throw new UnsupportedOperationException(); + } + + @Override + public ArrowBuf getValidityBuffer() { throw new UnsupportedOperationException(); } + + @Override + public ArrowBuf getOffsetBuffer() { return offsetBuffer; } + + public ArrowBuf getTypeBuffer() { return typeBuffer; } + + @Override + public ArrowBuf getDataBuffer() { throw new UnsupportedOperationException(); } + + public 
StructVector getStruct(byte typeId) { + StructVector structVector = typeId < 0 ? null : (StructVector) childVectors[typeId]; + if (structVector == null) { + int vectorCount = internalStruct.size(); + structVector = addOrGet(typeId, MinorType.STRUCT, StructVector.class); + if (internalStruct.size() > vectorCount) { + structVector.allocateNew(); + childVectors[typeId] = structVector; + if (callBack != null) { + callBack.doWork(); + } + } + } + return structVector; + } + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#assign lowerCaseName = name?lower_case/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal")> + + public ${name}Vector get${name}Vector(byte typeId<#if minor.class?starts_with("Decimal")>, ArrowType arrowType) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.${name?upper_case}<#if minor.class?starts_with("Decimal")>, arrowType, ${name}Vector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (${name}Vector) vector; + } + + + + + public ListVector getList(byte typeId) { + ListVector listVector = typeId < 0 ? null : (ListVector) childVectors[typeId]; + if (listVector == null) { + int vectorCount = internalStruct.size(); + listVector = addOrGet(typeId, MinorType.LIST, ListVector.class); + if (internalStruct.size() > vectorCount) { + listVector.allocateNew(); + childVectors[typeId] = listVector; + if (callBack != null) { + callBack.doWork(); + } + } + } + return listVector; + } + + public MapVector getMap(byte typeId) { + MapVector mapVector = typeId < 0 ? 
null : (MapVector) childVectors[typeId]; + if (mapVector == null) { + int vectorCount = internalStruct.size(); + mapVector = addOrGet(typeId, MinorType.MAP, MapVector.class); + if (internalStruct.size() > vectorCount) { + mapVector.allocateNew(); + childVectors[typeId] = mapVector; + if (callBack != null) { + callBack.doWork(); + } + } + } + return mapVector; + } + + public byte getTypeId(int index) { + return typeBuffer.getByte(index * TYPE_WIDTH); + } + + public ValueVector getVectorByType(byte typeId) { + return typeId < 0 ? null : childVectors[typeId]; + } + + @Override + public void allocateNew() throws OutOfMemoryException { + /* new allocation -- clear the current buffers */ + clear(); + internalStruct.allocateNew(); + try { + allocateTypeBuffer(); + allocateOffsetBuffer(); + } catch (Exception e) { + clear(); + throw e; + } + } + + @Override + public boolean allocateNewSafe() { + /* new allocation -- clear the current buffers */ + clear(); + boolean safe = internalStruct.allocateNewSafe(); + if (!safe) { return false; } + try { + allocateTypeBuffer(); + allocateOffsetBuffer(); + } catch (Exception e) { + clear(); + return false; + } + + return true; + } + + private void allocateTypeBuffer() { + typeBuffer = allocator.buffer(typeBufferAllocationSizeInBytes); + typeBuffer.readerIndex(0); + setNegative(0, typeBuffer.capacity()); + } + + private void allocateOffsetBuffer() { + offsetBuffer = allocator.buffer(offsetBufferAllocationSizeInBytes); + offsetBuffer.readerIndex(0); + offsetBuffer.setZero(0, offsetBuffer.capacity()); + } + + + @Override + public void reAlloc() { + internalStruct.reAlloc(); + reallocTypeBuffer(); + reallocOffsetBuffer(); + } + + public int getOffset(int index) { + return offsetBuffer.getInt((long) index * OFFSET_WIDTH); + } + + private void reallocTypeBuffer() { + final long currentBufferCapacity = typeBuffer.capacity(); + long newAllocationSize = currentBufferCapacity * 2; + if (newAllocationSize == 0) { + if 
(typeBufferAllocationSizeInBytes > 0) { + newAllocationSize = typeBufferAllocationSizeInBytes; + } else { + newAllocationSize = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH * 2; + } + } + + newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize); + assert newAllocationSize >= 1; + + if (newAllocationSize > BaseValueVector.MAX_ALLOCATION_SIZE) { + throw new OversizedAllocationException("Unable to expand the buffer"); + } + + final ArrowBuf newBuf = allocator.buffer((int)newAllocationSize); + newBuf.setBytes(0, typeBuffer, 0, currentBufferCapacity); + typeBuffer.getReferenceManager().release(1); + typeBuffer = newBuf; + typeBufferAllocationSizeInBytes = (int)newAllocationSize; + setNegative(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity); + } + + private void reallocOffsetBuffer() { + final long currentBufferCapacity = offsetBuffer.capacity(); + long newAllocationSize = currentBufferCapacity * 2; + if (newAllocationSize == 0) { + if (offsetBufferAllocationSizeInBytes > 0) { + newAllocationSize = offsetBufferAllocationSizeInBytes; + } else { + newAllocationSize = BaseValueVector.INITIAL_VALUE_ALLOCATION * OFFSET_WIDTH * 2; + } + } + + newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize); + assert newAllocationSize >= 1; + + if (newAllocationSize > BaseValueVector.MAX_ALLOCATION_SIZE) { + throw new OversizedAllocationException("Unable to expand the buffer"); + } + + final ArrowBuf newBuf = allocator.buffer((int) newAllocationSize); + newBuf.setBytes(0, offsetBuffer, 0, currentBufferCapacity); + newBuf.setZero(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity); + offsetBuffer.getReferenceManager().release(1); + offsetBuffer = newBuf; + offsetBufferAllocationSizeInBytes = (int) newAllocationSize; + } + + @Override + public void setInitialCapacity(int numRecords) { } + + @Override + public int getValueCapacity() { + long capacity = getTypeBufferValueCapacity(); + long offsetCapacity = 
getOffsetBufferValueCapacity(); + if (offsetCapacity < capacity) { + capacity = offsetCapacity; + } + long structCapacity = internalStruct.getValueCapacity(); + if (structCapacity < capacity) { + structCapacity = capacity; + } + return (int) capacity; + } + + @Override + public void close() { + clear(); + } + + @Override + public void clear() { + valueCount = 0; + typeBuffer.getReferenceManager().release(); + typeBuffer = allocator.getEmpty(); + offsetBuffer.getReferenceManager().release(); + offsetBuffer = allocator.getEmpty(); + internalStruct.clear(); + } + + @Override + public void reset() { + valueCount = 0; + setNegative(0, typeBuffer.capacity()); + offsetBuffer.setZero(0, offsetBuffer.capacity()); + internalStruct.reset(); + } + + @Override + public Field getField() { + int childCount = (int) Arrays.stream(typeFields).filter(field -> field != null).count(); + List childFields = new ArrayList<>(childCount); + int[] typeIds = new int[childCount]; + for (int i = 0; i < typeFields.length; i++) { + if (typeFields[i] != null) { + int curIdx = childFields.size(); + typeIds[curIdx] = i; + childFields.add(typeFields[i]); + } + } + + FieldType fieldType; + if (this.fieldType == null) { + fieldType = FieldType.nullable(new ArrowType.Union(Dense, typeIds)); + } else { + final UnionMode mode = UnionMode.Dense; + fieldType = new FieldType(this.fieldType.isNullable(), new ArrowType.Union(mode, typeIds), + this.fieldType.getDictionary(), this.fieldType.getMetadata()); + } + + return new Field(name, fieldType, childFields); + } + + @Override + public TransferPair getTransferPair(BufferAllocator allocator) { + return getTransferPair(name, allocator); + } + + @Override + public TransferPair getTransferPair(String ref, BufferAllocator allocator) { + return getTransferPair(ref, allocator, null); + } + + @Override + public TransferPair getTransferPair(String ref, BufferAllocator allocator, CallBack callBack) { + return new 
org.apache.arrow.vector.complex.DenseUnionVector.TransferImpl(ref, allocator, callBack); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator) { + return getTransferPair(field, allocator, null); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator, CallBack callBack) { + return new org.apache.arrow.vector.complex.DenseUnionVector.TransferImpl(field, allocator, callBack); + } + + @Override + public TransferPair makeTransferPair(ValueVector target) { + return new TransferImpl((DenseUnionVector) target); + } + + @Override + public void copyFrom(int inIndex, int outIndex, ValueVector from) { + Preconditions.checkArgument(this.getMinorType() == from.getMinorType()); + DenseUnionVector fromCast = (DenseUnionVector) from; + int inOffset = fromCast.offsetBuffer.getInt((long) inIndex * OFFSET_WIDTH); + fromCast.getReader().setPosition(inOffset); + int outOffset = offsetBuffer.getInt((long) outIndex * OFFSET_WIDTH); + getWriter().setPosition(outOffset); + ComplexCopier.copy(fromCast.reader, writer); + } + + @Override + public void copyFromSafe(int inIndex, int outIndex, ValueVector from) { + copyFrom(inIndex, outIndex, from); + } + + public FieldVector addVector(byte typeId, FieldVector v) { + final String name = v.getName().isEmpty() ? 
fieldName(typeId, v.getMinorType()) : v.getName(); + Preconditions.checkState(internalStruct.getChild(name) == null, String.format("%s vector already exists", name)); + final FieldVector newVector = internalStruct.addOrGet(name, v.getField().getFieldType(), v.getClass()); + v.makeTransferPair(newVector).transfer(); + internalStruct.putChild(name, newVector); + childVectors[typeId] = newVector; + if (callBack != null) { + callBack.doWork(); + } + return newVector; + } + + private class TransferImpl implements TransferPair { + private final TransferPair[] internalTransferPairs = new TransferPair[nextTypeId]; + private final DenseUnionVector to; + + public TransferImpl(String name, BufferAllocator allocator, CallBack callBack) { + to = new DenseUnionVector(name, allocator, null, callBack); + internalStruct.makeTransferPair(to.internalStruct); + createTransferPairs(); + } + + public TransferImpl(Field field, BufferAllocator allocator, CallBack callBack) { + to = new DenseUnionVector(field.getName(), allocator, null, callBack); + internalStruct.makeTransferPair(to.internalStruct); + createTransferPairs(); + } + + public TransferImpl(DenseUnionVector to) { + this.to = to; + internalStruct.makeTransferPair(to.internalStruct); + createTransferPairs(); + } + + private void createTransferPairs() { + for (int i = 0; i < nextTypeId; i++) { + ValueVector srcVec = internalStruct.getVectorById(i); + ValueVector dstVec = to.internalStruct.getVectorById(i); + to.typeFields[i] = typeFields[i]; + to.typeMapFields[i] = typeMapFields[i]; + to.childVectors[i] = dstVec; + internalTransferPairs[i] = srcVec.makeTransferPair(dstVec); + } + } + + @Override + public void transfer() { + to.clear(); + + ReferenceManager refManager = typeBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(typeBuffer, to.allocator).getTransferredBuffer(); + + refManager = offsetBuffer.getReferenceManager(); + to.offsetBuffer = refManager.transferOwnership(offsetBuffer, 
to.allocator).getTransferredBuffer(); + + for (int i = 0; i < nextTypeId; i++) { + if (internalTransferPairs[i] != null) { + internalTransferPairs[i].transfer(); + to.childVectors[i] = internalTransferPairs[i].getTo(); + } + } + to.valueCount = valueCount; + clear(); + } + + @Override + public void splitAndTransfer(int startIndex, int length) { + to.clear(); + + // transfer type buffer + int startPoint = startIndex * TYPE_WIDTH; + int sliceLength = length * TYPE_WIDTH; + ArrowBuf slicedBuffer = typeBuffer.slice(startPoint, sliceLength); + ReferenceManager refManager = slicedBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(slicedBuffer, to.allocator).getTransferredBuffer(); + + // transfer offset buffer + while (to.offsetBuffer.capacity() < (long) length * OFFSET_WIDTH) { + to.reallocOffsetBuffer(); + } + + int [] typeCounts = new int[nextTypeId]; + int [] typeStarts = new int[nextTypeId]; + for (int i = 0; i < typeCounts.length; i++) { + typeCounts[i] = 0; + typeStarts[i] = -1; + } + + for (int i = startIndex; i < startIndex + length; i++) { + byte typeId = typeBuffer.getByte(i); + if (typeId >= 0) { + to.offsetBuffer.setInt((long) (i - startIndex) * OFFSET_WIDTH, typeCounts[typeId]); + typeCounts[typeId] += 1; + if (typeStarts[typeId] == -1) { + typeStarts[typeId] = offsetBuffer.getInt((long) i * OFFSET_WIDTH); + } + } + } + + // transfer vector values + for (int i = 0; i < nextTypeId; i++) { + if (typeCounts[i] > 0 && typeStarts[i] != -1) { + internalTransferPairs[i].splitAndTransfer(typeStarts[i], typeCounts[i]); + to.childVectors[i] = internalTransferPairs[i].getTo(); + } + } + + to.setValueCount(length); + } + + @Override + public ValueVector getTo() { + return to; + } + + @Override + public void copyValueSafe(int from, int to) { + this.to.copyFrom(from, to, DenseUnionVector.this); + } + } + + @Override + public FieldReader getReader() { + if (reader == null) { + reader = new DenseUnionReader(this); + } + return reader; + } + + 
public FieldWriter getWriter() { + if (writer == null) { + writer = new DenseUnionWriter(this); + } + return writer; + } + + @Override + public int getBufferSize() { + return this.getBufferSizeFor(this.valueCount); + } + + @Override + public int getBufferSizeFor(final int count) { + if (count == 0) { + return 0; + } + + int[] counts = new int[Byte.MAX_VALUE + 1]; + for (int i = 0; i < count; i++) { + byte typeId = getTypeId(i); + if (typeId != -1) { + counts[typeId] += 1; + } + } + + long childBytes = 0; + for (int typeId = 0; typeId < childVectors.length; typeId++) { + ValueVector childVector = childVectors[typeId]; + if (childVector != null) { + childBytes += childVector.getBufferSizeFor(counts[typeId]); + } + } + + return (int) (count * TYPE_WIDTH + (long) count * OFFSET_WIDTH + childBytes); + } + + @Override + public ArrowBuf[] getBuffers(boolean clear) { + List list = new java.util.ArrayList<>(); + setReaderAndWriterIndex(); + if (getBufferSize() != 0) { + list.add(typeBuffer); + list.add(offsetBuffer); + list.addAll(java.util.Arrays.asList(internalStruct.getBuffers(clear))); + } + if (clear) { + valueCount = 0; + typeBuffer.getReferenceManager().retain(); + typeBuffer.close(); + typeBuffer = allocator.getEmpty(); + offsetBuffer.getReferenceManager().retain(); + offsetBuffer.close(); + offsetBuffer = allocator.getEmpty(); + } + return list.toArray(new ArrowBuf[list.size()]); + } + + @Override + public Iterator iterator() { + return internalStruct.iterator(); + } + + private ValueVector getVector(int index) { + byte typeId = typeBuffer.getByte(index * TYPE_WIDTH); + return getVectorByType(typeId); + } + + public Object getObject(int index) { + ValueVector vector = getVector(index); + if (vector != null) { + int offset = offsetBuffer.getInt((long) index * OFFSET_WIDTH); + return vector.isNull(offset) ? 
null : vector.getObject(offset); + } + return null; + } + + public void get(int index, DenseUnionHolder holder) { + FieldReader reader = new DenseUnionReader(DenseUnionVector.this); + reader.setPosition(index); + holder.reader = reader; + } + + public int getValueCount() { + return valueCount; + } + + /** + * IMPORTANT: Union types always return non null as there is no validity buffer. + * + * To check validity correctly you must check the underlying vector. + */ + public boolean isNull(int index) { + return false; + } + + @Override + public int getNullCount() { + return 0; + } + + public int isSet(int index) { + return isNull(index) ? 0 : 1; + } + + DenseUnionWriter writer; + + public void setValueCount(int valueCount) { + this.valueCount = valueCount; + while (valueCount > getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + reallocOffsetBuffer(); + } + setChildVectorValueCounts(); + } + + private void setChildVectorValueCounts() { + int [] counts = new int[Byte.MAX_VALUE + 1]; + for (int i = 0; i < this.valueCount; i++) { + byte typeId = getTypeId(i); + if (typeId != -1) { + counts[typeId] += 1; + } + } + for (int i = 0; i < nextTypeId; i++) { + childVectors[typeMapFields[i]].setValueCount(counts[typeMapFields[i]]); + } + } + + public void setSafe(int index, DenseUnionHolder holder) { + FieldReader reader = holder.reader; + if (writer == null) { + writer = new DenseUnionWriter(DenseUnionVector.this); + } + int offset = offsetBuffer.getInt((long) index * OFFSET_WIDTH); + MinorType type = reader.getMinorType(); + writer.setPosition(offset); + byte typeId = holder.typeId; + switch (type) { + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal")> + case ${name?upper_case}: + Nullable${name}Holder ${uncappedName}Holder = new Nullable${name}Holder(); + reader.read(${uncappedName}Holder); + setSafe(index, ${uncappedName}Holder); + break; + + + + case STRUCT: + case LIST: { + setTypeId(index, typeId); + ComplexCopier.copy(reader, writer); + break; + } + default: + throw new UnsupportedOperationException(); + } + } + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal")> + public void setSafe(int index, Nullable${name}Holder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + ${name}Vector vector = get${name}Vector(typeId<#if minor.class?starts_with("Decimal")>, new ArrowType.Decimal(holder.precision, holder.scale, holder.WIDTH * 8)); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + + + + + public void setTypeId(int index, byte typeId) { + while (index >= getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + } + typeBuffer.setByte(index * TYPE_WIDTH , typeId); + } + + private int getTypeBufferValueCapacity() { + return (int) typeBuffer.capacity() / TYPE_WIDTH; + } + + public void setOffset(int index, int offset) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + + private long getOffsetBufferValueCapacity() { + return offsetBuffer.capacity() / OFFSET_WIDTH; + } + + @Override + public int hashCode(int index, ArrowBufHasher hasher) { + if (isNull(index)) { + return 0; + } + int offset = offsetBuffer.getInt((long) index * OFFSET_WIDTH); + return getVector(index).hashCode(offset, 
hasher); + } + + @Override + public int hashCode(int index) { + return hashCode(index, SimpleHasher.INSTANCE); + } + + @Override + public OUT accept(VectorVisitor visitor, IN value) { + return visitor.visit(this, value); + } + + @Override + public String getName() { + return name; + } + + private void setNegative(long start, long end) { + for (long i = start;i < end; i++) { + typeBuffer.setByte(i, -1); + } + } + + @Override + public T addOrGet(String name, FieldType fieldType, Class clazz) { + return internalStruct.addOrGet(name, fieldType, clazz); + } + + @Override + public T getChild(String name, Class clazz) { + return internalStruct.getChild(name, clazz); + } + + @Override + public VectorWithOrdinal getChildVectorWithOrdinal(String name) { + return internalStruct.getChildVectorWithOrdinal(name); + } + + @Override + public int size() { + return internalStruct.size(); + } + + @Override + public void setInitialCapacity(int valueCount, double density) { + for (final ValueVector vector : internalStruct) { + if (vector instanceof DensityAwareVector) { + ((DensityAwareVector) vector).setInitialCapacity(valueCount, density); + } else { + vector.setInitialCapacity(valueCount); + } + } + } + + /** + * Set the element at the given index to null. For DenseUnionVector, it throws an UnsupportedOperationException + * as nulls are not supported at the top level and isNull() always returns false. 
+ * + * @param index position of element + * @throws UnsupportedOperationException whenever invoked + */ + @Override + public void setNull(int index) { + throw new UnsupportedOperationException("The method setNull() is not supported on DenseUnionVector."); + } +} diff --git a/java/vector/target/classes/codegen/templates/DenseUnionWriter.java b/java/vector/target/classes/codegen/templates/DenseUnionWriter.java new file mode 100644 index 000000000000..e69a62a9e0f6 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/DenseUnionWriter.java @@ -0,0 +1,302 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.complex.impl.NullableStructWriterFactory; +import org.apache.arrow.vector.types.Types; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/DenseUnionWriter.java" /> + + +<#include "/@includes/license.ftl" /> + + package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + import org.apache.arrow.vector.complex.writer.BaseWriter; + import org.apache.arrow.vector.types.Types.MinorType; + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ +@SuppressWarnings("unused") +public class DenseUnionWriter extends AbstractFieldWriter implements FieldWriter { + + DenseUnionVector data; + + private BaseWriter[] writers = new BaseWriter[Byte.MAX_VALUE + 1]; + private final NullableStructWriterFactory nullableStructWriterFactory; + + public DenseUnionWriter(DenseUnionVector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public DenseUnionWriter(DenseUnionVector vector, NullableStructWriterFactory nullableStructWriterFactory) { + data = vector; + this.nullableStructWriterFactory = nullableStructWriterFactory; + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + for (BaseWriter writer : writers) { + writer.setPosition(index); + } + } + + @Override + public void start() { + byte typeId = data.getTypeId(idx()); + getStructWriter((byte) idx()).start(); + } + + @Override + public void end() { + byte typeId = data.getTypeId(idx()); + getStructWriter(typeId).end(); + } + + @Override + public void startList() { + byte typeId = data.getTypeId(idx()); + getListWriter(typeId).startList(); + } + + @Override + public void endList() { + byte typeId = data.getTypeId(idx()); + getListWriter(typeId).endList(); + } + + private StructWriter getStructWriter(byte typeId) { + StructWriter structWriter = 
(StructWriter) writers[typeId]; + if (structWriter == null) { + structWriter = nullableStructWriterFactory.build((StructVector) data.getVectorByType(typeId)); + writers[typeId] = structWriter; + } + return structWriter; + } + + public StructWriter asStruct(byte typeId) { + data.setTypeId(idx(), typeId); + return getStructWriter(typeId); + } + + private ListWriter getListWriter(byte typeId) { + ListWriter listWriter = (ListWriter) writers[typeId]; + if (listWriter == null) { + listWriter = new UnionListWriter((ListVector) data.getVectorByType(typeId), nullableStructWriterFactory); + writers[typeId] = listWriter; + } + return listWriter; + } + + public ListWriter asList(byte typeId) { + data.setTypeId(idx(), typeId); + return getListWriter(typeId); + } + + private MapWriter getMapWriter(byte typeId) { + MapWriter mapWriter = (MapWriter) writers[typeId]; + if (mapWriter == null) { + mapWriter = new UnionMapWriter((MapVector) data.getVectorByType(typeId)); + writers[typeId] = mapWriter; + } + return mapWriter; + } + + public MapWriter asMap(byte typeId) { + data.setTypeId(idx(), typeId); + return getMapWriter(typeId); + } + + BaseWriter getWriter(byte typeId) { + MinorType minorType = data.getVectorByType(typeId).getMinorType(); + switch (minorType) { + case STRUCT: + return getStructWriter(typeId); + case LIST: + return getListWriter(typeId); + case MAP: + return getMapWriter(typeId); + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal")> + case ${name?upper_case}: + return get${name}Writer(typeId); + + + + default: + throw new UnsupportedOperationException("Unknown type: " + minorType); + } + } + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal")> + + private ${name}Writer get${name}Writer(byte typeId) { + ${name}Writer writer = (${name}Writer) writers[typeId]; + if (writer == null) { + writer = new ${name}WriterImpl((${name}Vector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public ${name}Writer as${name}(byte typeId) { + data.setTypeId(idx(), typeId); + return get${name}Writer(typeId); + } + + @Override + public void write(${name}Holder holder) { + throw new UnsupportedOperationException(); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, , byte typeId<#if minor.class?starts_with("Decimal")>, ArrowType arrowType) { + data.setTypeId(idx(), typeId); + get${name}Writer(typeId).setPosition(data.getOffset(idx())); + get${name}Writer(typeId).write${name}(<#list fields as field>${field.name}<#if field_has_next>, <#if minor.class?starts_with("Decimal")>, arrowType); + } + + + + + public void writeNull() { + } + + @Override + public StructWriter struct() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).struct(); + } + + @Override + public ListWriter list() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).list(); + } + + @Override + public ListWriter list(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), 
typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).list(name); + } + + @Override + public MapWriter map() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getMapWriter(typeId).map(); + } + + @Override + public MapWriter map(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).map(name); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).map(name, keysSorted); + } + + @Override + public StructWriter struct(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).struct(name); + } + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") > + @Override + public ${capName}Writer ${lowerName}(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).${lowerName}(name); + } + + @Override + public ${capName}Writer ${lowerName}() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).${lowerName}(); + } + + <#if minor.class?starts_with("Decimal")> + public ${capName}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).${lowerName}(name<#list minor.typeParams as typeParam>, ${typeParam.name}); + } + + + + @Override + public void allocate() { + data.allocateNew(); + } + + @Override + public void clear() { + data.clear(); + } + + @Override + public void close() throws Exception { + data.close(); + } + + @Override + public Field getField() { + return data.getField(); + } + + @Override + public int getValueCapacity() { + return data.getValueCapacity(); + } +} diff --git a/java/vector/target/classes/codegen/templates/HolderReaderImpl.java b/java/vector/target/classes/codegen/templates/HolderReaderImpl.java new file mode 100644 index 000000000000..8394aaad4175 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/HolderReaderImpl.java @@ -0,0 +1,173 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +<@pp.dropOutputFile /> +<#list vv.types as type> +<#list type.minor as minor> +<#list ["", "Nullable"] as holderMode> +<#assign nullMode = holderMode /> + +<#assign lowerName = minor.class?uncap_first /> +<#if lowerName == "int" ><#assign lowerName = "integer" /> +<#assign name = minor.class?cap_first /> +<#assign javaType = (minor.javaType!type.javaType) /> +<#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> +<#assign safeType=friendlyType /> +<#if safeType=="byte[]"><#assign safeType="ByteArray" /> +<#assign fields = (minor.fields!type.fields) + minor.typeParams![]/> + +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/${holderMode}${name}HolderReaderImpl.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +// Source code generated using FreeMarker template ${.template_name} + +@SuppressWarnings("unused") +public class ${holderMode}${name}HolderReaderImpl extends AbstractFieldReader { + + private ${nullMode}${name}Holder holder; + public ${holderMode}${name}HolderReaderImpl(${holderMode}${name}Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new 
UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.${name?upper_case}; + } + + @Override + public boolean isSet() { + <#if holderMode == "Nullable"> + return this.holder.isSet == 1; + <#else> + return true; + + } + + @Override + public void read(${name}Holder h) { + <#list fields as field> + h.${field.name} = holder.${field.name}; + + } + + @Override + public void read(Nullable${name}Holder h) { + <#list fields as field> + h.${field.name} = holder.${field.name}; + + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public ${friendlyType} read${safeType}() { + <#if nullMode == "Nullable"> + if (!isSet()) { + return null; + } + + + <#if type.major == "VarLen"> + <#if type.width == 4> + int length = holder.end - holder.start; + <#elseif type.width == 8> + int length = (int) (holder.end - holder.start); + + byte[] value = new byte [length]; + holder.buffer.getBytes(holder.start, value, 0, length); + <#if minor.class == "VarBinary" || minor.class == "LargeVarBinary"> + return value; + <#elseif minor.class == "VarChar" || minor.class == "LargeVarChar"> + Text text = new Text(); + text.set(value); + return text; + + <#elseif minor.class == "IntervalDay"> + return Duration.ofDays(holder.days).plusMillis(holder.milliseconds); + <#elseif minor.class == "IntervalYear"> + return Period.ofMonths(holder.value); + <#elseif minor.class == "IntervalMonthDayNano"> + return new PeriodDuration(Period.ofMonths(holder.months).plusDays(holder.days), + Duration.ofNanos(holder.nanoseconds)); + <#elseif minor.class == "Duration"> + return DurationVector.toDuration(holder.value, holder.unit); + <#elseif minor.class == "Bit" > + return new Boolean(holder.value != 0); + <#elseif minor.class == "Decimal"> + byte[] bytes = new 
byte[${type.width}]; + holder.buffer.getBytes(holder.start, bytes, 0, ${type.width}); + ${friendlyType} value = new BigDecimal(new BigInteger(bytes), holder.scale); + return value; + <#elseif minor.class == "Decimal256"> + byte[] bytes = new byte[${type.width}]; + holder.buffer.getBytes(holder.start, bytes, 0, ${type.width}); + ${friendlyType} value = new BigDecimal(new BigInteger(bytes), holder.scale); + return value; + <#elseif minor.class == "FixedSizeBinary"> + byte[] value = new byte [holder.byteWidth]; + holder.buffer.getBytes(0, value, 0, holder.byteWidth); + return value; + <#elseif minor.class == "TimeStampSec"> + final long millis = java.util.concurrent.TimeUnit.SECONDS.toMillis(holder.value); + return DateUtility.getLocalDateTimeFromEpochMilli(millis); + <#elseif minor.class == "TimeStampMilli" || minor.class == "DateMilli" || minor.class == "TimeMilli"> + return DateUtility.getLocalDateTimeFromEpochMilli(holder.value); + <#elseif minor.class == "TimeStampMicro"> + return DateUtility.getLocalDateTimeFromEpochMicro(holder.value); + <#elseif minor.class == "TimeStampNano"> + return DateUtility.getLocalDateTimeFromEpochNano(holder.value); + <#else> + ${friendlyType} value = new ${friendlyType}(this.holder.value); + return value; + + } + + @Override + public Object readObject() { + return read${safeType}(); + } + + <#if nullMode != "Nullable"> + public void copyAsValue(${minor.class?cap_first}Writer writer){ + writer.write(holder); + } + +} + + + + diff --git a/java/vector/target/classes/codegen/templates/NullReader.java b/java/vector/target/classes/codegen/templates/NullReader.java new file mode 100644 index 000000000000..0c65f9a56bfa --- /dev/null +++ b/java/vector/target/classes/codegen/templates/NullReader.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.vector.types.pojo.ArrowType.Null; +import org.apache.arrow.vector.types.pojo.Field; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/NullReader.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public class NullReader extends AbstractBaseReader implements FieldReader{ + + public static final NullReader INSTANCE = new NullReader(); + public static final NullReader EMPTY_LIST_INSTANCE = new NullReader(MinorType.NULL); + public static final NullReader EMPTY_STRUCT_INSTANCE = new NullReader(MinorType.STRUCT); + private MinorType type; + + private NullReader(){ + super(); + type = MinorType.NULL; + } + + private NullReader(MinorType type){ + super(); + this.type = type; + } + + @Override + public MinorType getMinorType() { + return type; + } + + @Override + public Field getField() { + return new Field("", FieldType.nullable(new Null()), null); + } + + public void copyAsValue(StructWriter writer) {} + + public void copyAsValue(ListWriter writer) {} + + public void copyAsValue(UnionWriter writer) {} + + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + public void 
read(${name}Holder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(Nullable${name}Holder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, ${name}Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(${minor.class}Writer writer){} + public void copyAsField(String name, ${minor.class}Writer writer){} + + public void read(int arrayIndex, Nullable${name}Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + + public int size(){ + return 0; + } + + public boolean isSet(){ + return false; + } + + public boolean next(){ + return false; + } + + public RepeatedStructReader struct(){ + return this; + } + + public RepeatedListReader list(){ + return this; + } + + public StructReader struct(String name){ + return this; + } + + public ListReader list(String name){ + return this; + } + + public FieldReader reader(String name){ + return this; + } + + public FieldReader reader(){ + return this; + } + + private void fail(String name){ + throw new IllegalArgumentException(String.format("You tried to read a %s type when you are using a ValueReader of type %s.", name, this.getClass().getSimpleName())); + } + + <#list ["Object", "BigDecimal", "Short", "Integer", "Long", "Boolean", + "LocalDateTime", "Duration", "Period", "Double", "Float", + "Character", "Text", "String", "Byte", "byte[]", "PeriodDuration"] as friendlyType> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + + public ${friendlyType} read${safeType}(int arrayIndex){ + return null; + } + + public ${friendlyType} read${safeType}(){ + return null; + } + + +} + + + diff --git a/java/vector/target/classes/codegen/templates/StructWriters.java b/java/vector/target/classes/codegen/templates/StructWriters.java new file mode 100644 index 000000000000..b6dd2b75c526 --- /dev/null +++ 
b/java/vector/target/classes/codegen/templates/StructWriters.java @@ -0,0 +1,351 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +<@pp.dropOutputFile /> +<#list ["Nullable", "Single"] as mode> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/${mode}StructWriter.java" /> +<#assign index = "idx()"> +<#if mode == "Single"> +<#assign containerClass = "NonNullableStructVector" /> +<#else> +<#assign containerClass = "StructVector" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> +import java.util.Map; +import java.util.HashMap; + +import org.apache.arrow.vector.holders.RepeatedStructHolder; +import org.apache.arrow.vector.AllocationHelper; +import org.apache.arrow.vector.complex.reader.FieldReader; +import org.apache.arrow.vector.complex.writer.FieldWriter; + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/* + * This class is generated using FreeMarker and the ${.template_name} template. 
+ */ +@SuppressWarnings("unused") +public class ${mode}StructWriter extends AbstractFieldWriter { + + protected final ${containerClass} container; + private int initialCapacity; + private final Map fields = new HashMap<>(); + public ${mode}StructWriter(${containerClass} container) { + <#if mode == "Single"> + if (container instanceof StructVector) { + throw new IllegalArgumentException("Invalid container: " + container); + } + + this.container = container; + this.initialCapacity = 0; + for (Field child : container.getField().getChildren()) { + MinorType minorType = Types.getMinorTypeForArrowType(child.getType()); + switch (minorType) { + case STRUCT: + struct(child.getName()); + break; + case LIST: + list(child.getName()); + break; + case MAP: { + ArrowType.Map arrowType = (ArrowType.Map) child.getType(); + map(child.getName(), arrowType.getKeysSorted()); + break; + } + case DENSEUNION: { + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.DENSEUNION.getType(), null, null); + DenseUnionWriter writer = new DenseUnionWriter(container.addOrGet(child.getName(), fieldType, DenseUnionVector.class), getNullableStructWriterFactory()); + fields.put(handleCase(child.getName()), writer); + break; + } + case UNION: + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.UNION.getType(), null, null); + UnionWriter writer = new UnionWriter(container.addOrGet(child.getName(), fieldType, UnionVector.class), getNullableStructWriterFactory()); + fields.put(handleCase(child.getName()), writer); + break; +<#list vv.types as type><#list type.minor as minor> +<#assign lowerName = minor.class?uncap_first /> +<#if lowerName == "int" ><#assign lowerName = "integer" /> +<#assign upperName = minor.class?upper_case /> + case ${upperName}: { + <#if minor.typeParams?? 
> + ${minor.arrowType} arrowType = (${minor.arrowType})child.getType(); + ${lowerName}(child.getName()<#list minor.typeParams as typeParam>, arrowType.get${typeParam.name?cap_first}()); + <#else> + ${lowerName}(child.getName()); + + break; + } + + default: + throw new UnsupportedOperationException("Unknown type: " + minorType); + } + } + } + + protected String handleCase(final String input) { + return input.toLowerCase(); + } + + protected NullableStructWriterFactory getNullableStructWriterFactory() { + return NullableStructWriterFactory.getNullableStructWriterFactoryInstance(); + } + + @Override + public int getValueCapacity() { + return container.getValueCapacity(); + } + + public void setInitialCapacity(int initialCapacity) { + this.initialCapacity = initialCapacity; + container.setInitialCapacity(initialCapacity); + } + + @Override + public boolean isEmptyStruct() { + return 0 == container.size(); + } + + @Override + public Field getField() { + return container.getField(); + } + + @Override + public StructWriter struct(String name) { + String finalName = handleCase(name); + FieldWriter writer = fields.get(finalName); + if(writer == null){ + int vectorCount=container.size(); + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.STRUCT.getType(), null, null); + StructVector vector = container.addOrGet(name, fieldType, StructVector.class); + writer = new PromotableWriter(vector, container, getNullableStructWriterFactory()); + if(vectorCount != container.size()) { + writer.allocate(); + } + writer.setPosition(idx()); + fields.put(finalName, writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.STRUCT); + } + } + return writer; + } + + @Override + public void close() throws Exception { + clear(); + container.close(); + } + + @Override + public void allocate() { + container.allocateNew(); + for(final FieldWriter w : fields.values()) { + w.allocate(); + } + } 
+ + @Override + public void clear() { + container.clear(); + for(final FieldWriter w : fields.values()) { + w.clear(); + } + } + + @Override + public ListWriter list(String name) { + String finalName = handleCase(name); + FieldWriter writer = fields.get(finalName); + int vectorCount = container.size(); + if(writer == null) { + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.LIST.getType(), null, null); + writer = new PromotableWriter(container.addOrGet(name, fieldType, ListVector.class), container, getNullableStructWriterFactory()); + if (container.size() > vectorCount) { + writer.allocate(); + } + writer.setPosition(idx()); + fields.put(finalName, writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.LIST); + } + } + return writer; + } + + @Override + public MapWriter map(String name) { + return map(name, false); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + MapVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new ArrowType.Map(keysSorted) + ,null, null), + MapVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.MAP, new ArrowType.Map(keysSorted)); + } + } + return writer; + } + + public void setValueCount(int count) { + container.setValueCount(count); + } + + @Override + public void setPosition(int index) { + 
super.setPosition(index); + for(final FieldWriter w: fields.values()) { + w.setPosition(index); + } + } + + <#if mode="Nullable"> + @Override + public void writeNull() { + container.setNull(idx()); + setValueCount(idx()+1); + super.setPosition(idx()+1); + } + + + @Override + public void start() { + <#if mode == "Single"> + <#else> + container.setIndexDefined(idx()); + + } + + @Override + public void end() { + setPosition(idx()+1); + } + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#assign vectName = capName /> + + <#if minor.typeParams?? > + @Override + public ${minor.class}Writer ${lowerName}(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public ${minor.class}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + <#else> + @Override + public ${minor.class}Writer ${lowerName}(String name) { + + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + ${vectName}Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + <#if minor.typeParams??> + <#if minor.arrowTypeConstructorParams??> + <#assign constructorParams = minor.arrowTypeConstructorParams /> + <#else> + <#assign constructorParams = [] /> + <#list minor.typeParams?reverse as typeParam> + <#assign constructorParams = constructorParams + [ typeParam.name ] /> + + + new ${minor.arrowType}(${constructorParams?join(", ")}<#if minor.class?starts_with("Decimal")>, ${vectName}Vector.TYPE_WIDTH * 8) + <#else> + MinorType.${upperName}.getType() + + ,null, null), + ${vectName}Vector.class); + writer = new 
PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + <#if minor.class?starts_with("Decimal")> + ((PromotableWriter)writer).getWriter(MinorType.${upperName}<#if minor.class?starts_with("Decimal")>, new ${minor.arrowType}(precision, scale, ${vectName}Vector.TYPE_WIDTH * 8)); + <#elseif is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + <#if minor.arrowTypeConstructorParams??> + <#assign constructorParams = minor.arrowTypeConstructorParams /> + <#else> + <#assign constructorParams = [] /> + <#list minor.typeParams?reverse as typeParam> + <#assign constructorParams = constructorParams + [ typeParam.name ] /> + + + ArrowType arrowType = new ${minor.arrowType}(${constructorParams?join(", ")}); + ((PromotableWriter)writer).getWriter(MinorType.${upperName}, arrowType); + <#else> + ((PromotableWriter)writer).getWriter(MinorType.${upperName}); + + } + } + return writer; + } + + + +} + diff --git a/java/vector/target/classes/codegen/templates/UnionFixedSizeListWriter.java b/java/vector/target/classes/codegen/templates/UnionFixedSizeListWriter.java new file mode 100644 index 000000000000..3436e3a96765 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/UnionFixedSizeListWriter.java @@ -0,0 +1,375 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.vector.complex.writer.Decimal256Writer; +import org.apache.arrow.vector.complex.writer.DecimalWriter; +import org.apache.arrow.vector.holders.Decimal256Holder; +import org.apache.arrow.vector.holders.DecimalHolder; + + +import java.lang.UnsupportedOperationException; +import java.math.BigDecimal; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/UnionFixedSizeListWriter.java" /> + + +<#include "/@includes/license.ftl" /> + + package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/* + * This class is generated using freemarker and the ${.template_name} template. 
+ */ + +@SuppressWarnings("unused") +public class UnionFixedSizeListWriter extends AbstractFieldWriter { + + protected FixedSizeListVector vector; + protected PromotableWriter writer; + private boolean inStruct = false; + private String structName; + private final int listSize; + + public UnionFixedSizeListWriter(FixedSizeListVector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public UnionFixedSizeListWriter(FixedSizeListVector vector, NullableStructWriterFactory nullableStructWriterFactory) { + this.vector = vector; + this.writer = new PromotableWriter(vector.getDataVector(), vector, nullableStructWriterFactory); + this.listSize = vector.getListSize(); + } + + public UnionFixedSizeListWriter(FixedSizeListVector vector, AbstractFieldWriter parent) { + this(vector); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + public Field getField() { + return vector.getField(); + } + + public void setValueCount(int count) { + vector.setValueCount(count); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void close() throws Exception { + vector.close(); + writer.close(); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + } + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if uncappedName == "int" ><#assign uncappedName = "integer" /> + <#if !minor.typeParams?? 
> + + @Override + public ${name}Writer ${uncappedName}() { + return this; + } + + @Override + public ${name}Writer ${uncappedName}(String name) { + structName = name; + return writer.${uncappedName}(name); + } + + + + @Override + public DecimalWriter decimal() { + return this; + } + + @Override + public DecimalWriter decimal(String name, int scale, int precision) { + return writer.decimal(name, scale, precision); + } + + @Override + public DecimalWriter decimal(String name) { + return writer.decimal(name); + } + + + @Override + public Decimal256Writer decimal256() { + return this; + } + + @Override + public Decimal256Writer decimal256(String name, int scale, int precision) { + return writer.decimal256(name, scale, precision); + } + + @Override + public Decimal256Writer decimal256(String name) { + return writer.decimal256(name); + } + + @Override + public StructWriter struct() { + inStruct = true; + return this; + } + + @Override + public ListWriter list() { + return writer; + } + + @Override + public ListWriter list(String name) { + ListWriter listWriter = writer.list(name); + return listWriter; + } + + @Override + public StructWriter struct(String name) { + StructWriter structWriter = writer.struct(name); + return structWriter; + } + + @Override + public MapWriter map() { + return writer; + } + + @Override + public MapWriter map(String name) { + MapWriter mapWriter = writer.map(name); + return mapWriter; + } + + @Override + public MapWriter map(boolean keysSorted) { + writer.map(keysSorted); + return writer; + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + MapWriter mapWriter = writer.map(name, keysSorted); + return mapWriter; + } + + @Override + public void startList() { + int start = vector.startNewValue(idx()); + writer.setPosition(start); + } + + @Override + public void endList() { + setPosition(idx() + 1); + } + + @Override + public void start() { + writer.start(); + } + + @Override + public void end() { + writer.end(); + inStruct 
= false; + } + + @Override + public void write(DecimalHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write(holder); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write(Decimal256Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write(holder); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeNull() { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeNull(); + } + + public void writeDecimal(long start, ArrowBuf buffer, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal(start, buffer, arrowType); + writer.setPosition(writer.idx() + 1); + } + + public void writeDecimal(BigDecimal value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal(value); + writer.setPosition(writer.idx() + 1); + } + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeBigEndianBytesToDecimal(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + + public void writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new 
IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal256(start, buffer, arrowType); + writer.setPosition(writer.idx() + 1); + } + + public void writeDecimal256(BigDecimal value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal256(value); + writer.setPosition(writer.idx() + 1); + } + + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeBigEndianBytesToDecimal256(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); 
+ } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(Text value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(String value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + + <#if !minor.typeParams?? 
> + @Override + public void write${name}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); + writer.setPosition(writer.idx() + 1); + } + + public void write(${name}Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${name}(<#list fields as field>holder.${field.name}<#if field_has_next>, ); + writer.setPosition(writer.idx() + 1); + } + + + + +} diff --git a/java/vector/target/classes/codegen/templates/UnionListWriter.java b/java/vector/target/classes/codegen/templates/UnionListWriter.java new file mode 100644 index 000000000000..5c0565ee2717 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/UnionListWriter.java @@ -0,0 +1,319 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.vector.complex.writer.Decimal256Writer; +import org.apache.arrow.vector.complex.writer.DecimalWriter; +import org.apache.arrow.vector.holders.Decimal256Holder; +import org.apache.arrow.vector.holders.DecimalHolder; + + +import java.lang.UnsupportedOperationException; +import java.math.BigDecimal; + +<@pp.dropOutputFile /> +<#list ["List", "LargeList"] as listName> + +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/Union${listName}Writer.java" /> + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +import static org.apache.arrow.memory.util.LargeMemoryUtil.checkedCastToInt; +<#include "/@includes/vv_imports.ftl" /> + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ + +@SuppressWarnings("unused") +public class Union${listName}Writer extends AbstractFieldWriter { + + protected ${listName}Vector vector; + protected PromotableWriter writer; + private boolean inStruct = false; + private boolean listStarted = false; + private String structName; + <#if listName == "LargeList"> + private static final long OFFSET_WIDTH = 8; + <#else> + private static final int OFFSET_WIDTH = 4; + + + public Union${listName}Writer(${listName}Vector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public Union${listName}Writer(${listName}Vector vector, NullableStructWriterFactory nullableStructWriterFactory) { + this.vector = vector; + this.writer = new PromotableWriter(vector.getDataVector(), vector, nullableStructWriterFactory); + } + + public Union${listName}Writer(${listName}Vector vector, AbstractFieldWriter parent) { + this(vector); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void clear() { + vector.clear(); + 
} + + @Override + public Field getField() { + return vector.getField(); + } + + public void setValueCount(int count) { + vector.setValueCount(count); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void close() throws Exception { + vector.close(); + writer.close(); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + } + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#assign vectName = capName /> + @Override + public ${minor.class}Writer ${lowerName}() { + return this; + } + + <#if minor.typeParams?? > + @Override + public ${minor.class}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + return writer.${lowerName}(name<#list minor.typeParams as typeParam>, ${typeParam.name}); + } + + + @Override + public ${minor.class}Writer ${lowerName}(String name) { + structName = name; + return writer.${lowerName}(name); + } + + + + @Override + public StructWriter struct() { + inStruct = true; + return this; + } + + @Override + public ListWriter list() { + return writer; + } + + @Override + public ListWriter list(String name) { + ListWriter listWriter = writer.list(name); + return listWriter; + } + + @Override + public StructWriter struct(String name) { + StructWriter structWriter = writer.struct(name); + return structWriter; + } + + @Override + public MapWriter map() { + return writer; + } + + @Override + public MapWriter map(String name) { + MapWriter mapWriter = writer.map(name); + return mapWriter; + } + + @Override + public MapWriter map(boolean keysSorted) { + writer.map(keysSorted); + return writer; + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + MapWriter mapWriter = 
writer.map(name, keysSorted); + return mapWriter; + } + + <#if listName == "LargeList"> + @Override + public void startList() { + vector.startNewValue(idx()); + writer.setPosition(checkedCastToInt(vector.getOffsetBuffer().getLong((idx() + 1L) * OFFSET_WIDTH))); + listStarted = true; + } + + @Override + public void endList() { + vector.getOffsetBuffer().setLong((idx() + 1L) * OFFSET_WIDTH, writer.idx()); + setPosition(idx() + 1); + listStarted = false; + } + <#else> + @Override + public void startList() { + vector.startNewValue(idx()); + writer.setPosition(vector.getOffsetBuffer().getInt((idx() + 1L) * OFFSET_WIDTH)); + listStarted = true; + } + + @Override + public void endList() { + vector.getOffsetBuffer().setInt((idx() + 1L) * OFFSET_WIDTH, writer.idx()); + setPosition(idx() + 1); + listStarted = false; + } + + + @Override + public void start() { + writer.start(); + } + + @Override + public void end() { + writer.end(); + inStruct = false; + } + + @Override + public void writeNull() { + if (!listStarted){ + vector.setNull(idx()); + } else { + writer.writeNull(); + } + } + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + @Override + public void write${name}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + writer.write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); + writer.setPosition(writer.idx()+1); + } + + <#if is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + @Override + public void write(${name}Holder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + <#elseif minor.class?starts_with("Decimal")> + public void write${name}(long start, ArrowBuf buffer, ArrowType arrowType) { + writer.write${name}(start, buffer, arrowType); + writer.setPosition(writer.idx()+1); + } + + @Override + public void 
write(${name}Holder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + public void write${name}(BigDecimal value) { + writer.write${name}(value); + writer.setPosition(writer.idx()+1); + } + + public void writeBigEndianBytesTo${name}(byte[] value, ArrowType arrowType){ + writer.writeBigEndianBytesTo${name}(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + <#else> + @Override + public void write(${name}Holder holder) { + writer.write${name}(<#list fields as field>holder.${field.name}<#if field_has_next>, ); + writer.setPosition(writer.idx()+1); + } + + + <#if minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(Text value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + public void write${minor.class}(String value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + + + +} + diff --git a/java/vector/target/classes/codegen/templates/UnionMapWriter.java b/java/vector/target/classes/codegen/templates/UnionMapWriter.java new file mode 100644 index 000000000000..606f880377be --- /dev/null +++ b/java/vector/target/classes/codegen/templates/UnionMapWriter.java @@ -0,0 +1,222 @@ +/* + * Licensed to the Apache Software Foundation 
(ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.vector.complex.writer.Decimal256Writer; +import org.apache.arrow.vector.complex.writer.DecimalWriter; +import org.apache.arrow.vector.holders.Decimal256Holder; +import org.apache.arrow.vector.holders.DecimalHolder; + +import java.lang.UnsupportedOperationException; +import java.math.BigDecimal; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/UnionMapWriter.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ + +/** + *

Writer for MapVectors. This extends UnionListWriter to simplify writing map entries to a list + * of struct elements, with "key" and "value" fields. The procedure for writing a map begin with + * {@link #startMap()} followed by {@link #startEntry()}. An entry is written by using the + * {@link #key()} writer to write the key, then the {@link #value()} writer to write a value. After + * writing the value, call {@link #endEntry()} to complete the entry. Each map can have 1 or more + * entries. When done writing entries, call {@link #endMap()} to complete the map. + * + *

NOTE: the MapVector can have NULL values by not writing to position. If a map is started with + * {@link #startMap()}, then it must have a key written. The value of a map entry can be NULL by + * not using the {@link #value()} writer. + * + *

Example to write the following map to position 5 of a vector + *

{@code
+ *   // {
+ *   //   1 -> 3,
+ *   //   2 -> 4,
+ *   //   3 -> NULL
+ *   // }
+ *
+ *   UnionMapWriter writer = ...
+ *
+ *   writer.setPosition(5);
+ *   writer.startMap();
+ *   writer.startEntry();
+ *   writer.key().integer().writeInt(1);
+ *   writer.value().integer().writeInt(3);
+ *   writer.endEntry();
+ *   writer.startEntry();
+ *   writer.key().integer().writeInt(2);
+ *   writer.value().integer().writeInt(4);
+ *   writer.endEntry();
+ *   writer.startEntry();
+ *   writer.key().integer().writeInt(3);
+ *   writer.endEntry();
+ *   writer.endMap();
+ * 
+ *

+ */ +@SuppressWarnings("unused") +public class UnionMapWriter extends UnionListWriter { + + /** + * Current mode for writing map entries, set by calling {@link #key()} or {@link #value()} + * and reset with a call to {@link #endEntry()}. With KEY mode, a struct writer with field + * named "key" is returned. With VALUE mode, a struct writer with field named "value" is + * returned. In OFF mode, the writer will behave like a standard UnionListWriter + */ + private enum MapWriteMode { + OFF, + KEY, + VALUE, + } + + private MapWriteMode mode = MapWriteMode.OFF; + private StructWriter entryWriter; + + public UnionMapWriter(MapVector vector) { + super(vector); + entryWriter = struct(); + } + + /** Start writing a map that consists of 1 or more entries. */ + public void startMap() { + startList(); + } + + /** Complete the map. */ + public void endMap() { + endList(); + } + + /** + * Start a map entry that should be followed by calls to {@link #key()} and {@link #value()} + * writers. Call {@link #endEntry()} to complete the entry. + */ + public void startEntry() { + writer.setAddVectorAsNullable(false); + entryWriter.start(); + } + + /** Complete the map entry. */ + public void endEntry() { + entryWriter.end(); + mode = MapWriteMode.OFF; + writer.setAddVectorAsNullable(true); + } + + /** Return the key writer that is used to write to the "key" field. */ + public UnionMapWriter key() { + writer.setAddVectorAsNullable(false); + mode = MapWriteMode.KEY; + return this; + } + + /** Return the value writer that is used to write to the "value" field. */ + public UnionMapWriter value() { + writer.setAddVectorAsNullable(true); + mode = MapWriteMode.VALUE; + return this; + } + + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if uncappedName == "int" ><#assign uncappedName = "integer" /> + <#if !minor.typeParams?? 
> + @Override + public ${name}Writer ${uncappedName}() { + switch (mode) { + case KEY: + return entryWriter.${uncappedName}(MapVector.KEY_NAME); + case VALUE: + return entryWriter.${uncappedName}(MapVector.VALUE_NAME); + default: + return this; + } + } + + + + @Override + public DecimalWriter decimal() { + switch (mode) { + case KEY: + return entryWriter.decimal(MapVector.KEY_NAME); + case VALUE: + return entryWriter.decimal(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public Decimal256Writer decimal256() { + switch (mode) { + case KEY: + return entryWriter.decimal256(MapVector.KEY_NAME); + case VALUE: + return entryWriter.decimal256(MapVector.VALUE_NAME); + default: + return this; + } + } + + + @Override + public StructWriter struct() { + switch (mode) { + case KEY: + return entryWriter.struct(MapVector.KEY_NAME); + case VALUE: + return entryWriter.struct(MapVector.VALUE_NAME); + default: + return super.struct(); + } + } + + @Override + public ListWriter list() { + switch (mode) { + case KEY: + return entryWriter.list(MapVector.KEY_NAME); + case VALUE: + return entryWriter.list(MapVector.VALUE_NAME); + default: + return super.list(); + } + } + + @Override + public MapWriter map(boolean keysSorted) { + switch (mode) { + case KEY: + return entryWriter.map(MapVector.KEY_NAME, keysSorted); + case VALUE: + return entryWriter.map(MapVector.VALUE_NAME, keysSorted); + default: + return super.map(); + } + } +} diff --git a/java/vector/target/classes/codegen/templates/UnionReader.java b/java/vector/target/classes/codegen/templates/UnionReader.java new file mode 100644 index 000000000000..822d4822987f --- /dev/null +++ b/java/vector/target/classes/codegen/templates/UnionReader.java @@ -0,0 +1,230 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +import org.apache.arrow.vector.types.Types.MinorType; +import org.apache.arrow.vector.types.pojo.Field; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/UnionReader.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public class UnionReader extends AbstractFieldReader { + + private static final int NUM_SUPPORTED_TYPES = 46; + + private BaseReader[] readers = new BaseReader[NUM_SUPPORTED_TYPES]; + public UnionVector data; + + public UnionReader(UnionVector data) { + this.data = data; + } + + public MinorType getMinorType() { + return TYPES[data.getTypeValue(idx())]; + } + + private static MinorType[] TYPES = new MinorType[NUM_SUPPORTED_TYPES]; + + static { + for (MinorType minorType : MinorType.values()) { + TYPES[minorType.ordinal()] = minorType; + } + } + + @Override + public Field getField() { + return data.getField(); + } + + public boolean isSet(){ + return !data.isNull(idx()); + } + + public void read(UnionHolder holder) { + holder.reader = this; + holder.isSet = this.isSet() ? 
1 : 0; + } + + public void read(int index, UnionHolder holder) { + getList().read(index, holder); + } + + private FieldReader getReaderForIndex(int index) { + int typeValue = data.getTypeValue(index); + FieldReader reader = (FieldReader) readers[typeValue]; + if (reader != null) { + return reader; + } + switch (MinorType.values()[typeValue]) { + case NULL: + return NullReader.INSTANCE; + case STRUCT: + return (FieldReader) getStruct(); + case LIST: + return (FieldReader) getList(); + case MAP: + return (FieldReader) getMap(); + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + case ${name?upper_case}: + return (FieldReader) get${name}(); + + + + default: + throw new UnsupportedOperationException("Unsupported type: " + MinorType.values()[typeValue]); + } + } + + private SingleStructReaderImpl structReader; + + private StructReader getStruct() { + if (structReader == null) { + structReader = (SingleStructReaderImpl) data.getStruct().getReader(); + structReader.setPosition(idx()); + readers[MinorType.STRUCT.ordinal()] = structReader; + } + return structReader; + } + + private UnionListReader listReader; + + private FieldReader getList() { + if (listReader == null) { + listReader = new UnionListReader(data.getList()); + listReader.setPosition(idx()); + readers[MinorType.LIST.ordinal()] = listReader; + } + return listReader; + } + + private UnionMapReader mapReader; + + private FieldReader getMap() { + if (mapReader == null) { + mapReader = new UnionMapReader(data.getMap()); + mapReader.setPosition(idx()); + readers[MinorType.MAP.ordinal()] = mapReader; + } + return mapReader; + } + + @Override + public java.util.Iterator iterator() { + return getStruct().iterator(); + } + + @Override + public void 
copyAsValue(UnionWriter writer) { + writer.data.copyFrom(idx(), writer.idx(), data); + } + + <#list ["Object", "BigDecimal", "Short", "Integer", "Long", "Boolean", + "LocalDateTime", "Duration", "Period", "Double", "Float", + "Character", "Text", "Byte", "byte[]", "PeriodDuration"] as friendlyType> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + + @Override + public ${friendlyType} read${safeType}() { + return getReaderForIndex(idx()).read${safeType}(); + } + + + + public int size() { + return getReaderForIndex(idx()).size(); + } + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign uncappedName = name?uncap_first/> + <#assign boxedType = (minor.boxedType!type.boxedType) /> + <#assign javaType = (minor.javaType!type.javaType) /> + <#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + + private ${name}ReaderImpl ${uncappedName}Reader; + + private ${name}ReaderImpl get${name}() { + if (${uncappedName}Reader == null) { + ${uncappedName}Reader = new ${name}ReaderImpl(data.get${name}Vector()); + ${uncappedName}Reader.setPosition(idx()); + readers[MinorType.${name?upper_case}.ordinal()] = ${uncappedName}Reader; + } + return ${uncappedName}Reader; + } + + public void read(Nullable${name}Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(${name}Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + + + + @Override + public void copyAsValue(ListWriter writer) { + ComplexCopier.copy(this, (FieldWriter) writer); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + for (BaseReader reader : readers) { + if (reader != null) { + reader.setPosition(index); + } + } + } + + public FieldReader reader(String name){ + return getStruct().reader(name); + } + + public FieldReader reader() { + return getList().reader(); + } + + public boolean next() { + return getReaderForIndex(idx()).next(); + } +} diff --git a/java/vector/target/classes/codegen/templates/UnionVector.java b/java/vector/target/classes/codegen/templates/UnionVector.java new file mode 100644 index 000000000000..ea79c5c2fba7 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/UnionVector.java @@ -0,0 +1,911 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.ReferenceManager; +import org.apache.arrow.memory.util.CommonUtil; +import org.apache.arrow.memory.util.hash.ArrowBufHasher; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.BaseValueVector; +import org.apache.arrow.vector.BitVectorHelper; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.ValueVector; +import org.apache.arrow.vector.complex.AbstractStructVector; +import org.apache.arrow.vector.complex.NonNullableStructVector; +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.compare.VectorVisitor; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.UnionMode; +import org.apache.arrow.vector.compare.RangeEqualsVisitor; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.util.CallBack; +import org.apache.arrow.vector.util.DataSizeRoundingUtil; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/UnionVector.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex; + +<#include "/@includes/vv_imports.ftl" /> +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.memory.util.CommonUtil; +import 
org.apache.arrow.vector.compare.VectorVisitor; +import org.apache.arrow.vector.complex.impl.ComplexCopier; +import org.apache.arrow.vector.util.CallBack; +import org.apache.arrow.vector.util.ValueVectorUtility; +import org.apache.arrow.vector.ipc.message.ArrowFieldNode; +import org.apache.arrow.memory.util.ArrowBufPointer; +import org.apache.arrow.memory.util.hash.ArrowBufHasher; +import org.apache.arrow.vector.BaseValueVector; +import org.apache.arrow.vector.util.OversizedAllocationException; +import org.apache.arrow.util.Preconditions; + +import static org.apache.arrow.vector.types.UnionMode.Sparse; +import static org.apache.arrow.memory.util.LargeMemoryUtil.checkedCastToInt; +import static org.apache.arrow.memory.util.LargeMemoryUtil.capAtMaxInt; + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ +@SuppressWarnings("unused") + + +/** + * A vector which can hold values of different types. It does so by using a StructVector which contains a vector for each + * primitive type that is stored. StructVector is used in order to take advantage of its serialization/deserialization methods, + * as well as the addOrGet method. + * + * For performance reasons, UnionVector stores a cached reference to each subtype vector, to avoid having to do the struct lookup + * each time the vector is accessed. 
+ * Source code generated using FreeMarker template ${.template_name} + */ +public class UnionVector extends AbstractContainerVector implements FieldVector { + int valueCount; + + NonNullableStructVector internalStruct; + protected ArrowBuf typeBuffer; + + private StructVector structVector; + private ListVector listVector; + private MapVector mapVector; + + private FieldReader reader; + + private int singleType = 0; + private ValueVector singleVector; + + private int typeBufferAllocationSizeInBytes; + + private final FieldType fieldType; + private final Field[] typeIds = new Field[Byte.MAX_VALUE + 1]; + + public static final byte TYPE_WIDTH = 1; + private static final FieldType INTERNAL_STRUCT_TYPE = new FieldType(false /*nullable*/, + ArrowType.Struct.INSTANCE, null /*dictionary*/, null /*metadata*/); + + public static UnionVector empty(String name, BufferAllocator allocator) { + FieldType fieldType = FieldType.nullable(new ArrowType.Union( + UnionMode.Sparse, null)); + return new UnionVector(name, allocator, fieldType, null); + } + + public UnionVector(String name, BufferAllocator allocator, FieldType fieldType, CallBack callBack) { + super(name, allocator, callBack); + this.fieldType = fieldType; + this.internalStruct = new NonNullableStructVector( + "internal", + allocator, + INTERNAL_STRUCT_TYPE, + callBack, + AbstractStructVector.ConflictPolicy.CONFLICT_REPLACE, + false); + this.typeBuffer = allocator.getEmpty(); + this.typeBufferAllocationSizeInBytes = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH; + } + + public BufferAllocator getAllocator() { + return allocator; + } + + @Override + public MinorType getMinorType() { + return MinorType.UNION; + } + + @Override + public void initializeChildrenFromFields(List children) { + int count = 0; + for (Field child: children) { + int typeId = Types.getMinorTypeForArrowType(child.getType()).ordinal(); + if (this.fieldType != null) { + int[] typeIds = ((ArrowType.Union)this.fieldType.getType()).getTypeIds(); + 
if (typeIds != null) { + typeId = typeIds[count++]; + } + } + typeIds[typeId] = child; + } + internalStruct.initializeChildrenFromFields(children); + } + + @Override + public List getChildrenFromFields() { + return internalStruct.getChildrenFromFields(); + } + + @Override + public void loadFieldBuffers(ArrowFieldNode fieldNode, List ownBuffers) { + if (ownBuffers.size() != 1) { + throw new IllegalArgumentException("Illegal buffer count, expected 1, got: " + ownBuffers.size()); + } + ArrowBuf buffer = ownBuffers.get(0); + typeBuffer.getReferenceManager().release(); + typeBuffer = buffer.getReferenceManager().retain(buffer, allocator); + typeBufferAllocationSizeInBytes = checkedCastToInt(typeBuffer.capacity()); + this.valueCount = fieldNode.getLength(); + } + + @Override + public List getFieldBuffers() { + List result = new ArrayList<>(1); + setReaderAndWriterIndex(); + result.add(typeBuffer); + + return result; + } + + private void setReaderAndWriterIndex() { + typeBuffer.readerIndex(0); + typeBuffer.writerIndex(valueCount * TYPE_WIDTH); + } + + /** + * Get the inner vectors. + * + * @deprecated This API will be removed as the current implementations no longer support inner vectors. + * + * @return the inner vectors for this field as defined by the TypeLayout + */ + @Deprecated + @Override + public List getFieldInnerVectors() { + throw new UnsupportedOperationException("There are no inner vectors. Use geFieldBuffers"); + } + + private String fieldName(MinorType type) { + return type.name().toLowerCase(); + } + + private FieldType fieldType(MinorType type) { + return FieldType.nullable(type.getType()); + } + + private T addOrGet(Types.MinorType minorType, Class c) { + return addOrGet(null, minorType, c); + } + + private T addOrGet(String name, Types.MinorType minorType, ArrowType arrowType, Class c) { + return internalStruct.addOrGet(name == null ? 
fieldName(minorType) : name, FieldType.nullable(arrowType), c); + } + + private T addOrGet(String name, Types.MinorType minorType, Class c) { + return internalStruct.addOrGet(name == null ? fieldName(minorType) : name, fieldType(minorType), c); + } + + + @Override + public long getValidityBufferAddress() { + throw new UnsupportedOperationException(); + } + + public long getTypeBufferAddress() { + return typeBuffer.memoryAddress(); + } + + @Override + public long getDataBufferAddress() { + throw new UnsupportedOperationException(); + } + + @Override + public long getOffsetBufferAddress() { + throw new UnsupportedOperationException(); + } + + public ArrowBuf getTypeBuffer() { + return typeBuffer; + } + + @Override + public ArrowBuf getValidityBuffer() { throw new UnsupportedOperationException(); } + + @Override + public ArrowBuf getDataBuffer() { throw new UnsupportedOperationException(); } + + @Override + public ArrowBuf getOffsetBuffer() { throw new UnsupportedOperationException(); } + + public StructVector getStruct() { + if (structVector == null) { + int vectorCount = internalStruct.size(); + structVector = addOrGet(MinorType.STRUCT, StructVector.class); + if (internalStruct.size() > vectorCount) { + structVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return structVector; + } + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#assign lowerCaseName = name?lower_case/> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + + private ${name}Vector ${uncappedName}Vector; + + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + public ${name}Vector get${name}Vector() { + if (${uncappedName}Vector == null) { + throw new IllegalArgumentException("No ${name} present. Provide ArrowType argument to create a new vector"); + } + return ${uncappedName}Vector; + } + public ${name}Vector get${name}Vector(ArrowType arrowType) { + return get${name}Vector(null, arrowType); + } + public ${name}Vector get${name}Vector(String name, ArrowType arrowType) { + if (${uncappedName}Vector == null) { + int vectorCount = internalStruct.size(); + ${uncappedName}Vector = addOrGet(name, MinorType.${name?upper_case}, arrowType, ${name}Vector.class); + if (internalStruct.size() > vectorCount) { + ${uncappedName}Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return ${uncappedName}Vector; + } + <#else> + public ${name}Vector get${name}Vector() { + return get${name}Vector(null); + } + + public ${name}Vector get${name}Vector(String name) { + if (${uncappedName}Vector == null) { + int vectorCount = internalStruct.size(); + ${uncappedName}Vector = addOrGet(name, MinorType.${name?upper_case}, ${name}Vector.class); + if (internalStruct.size() > vectorCount) { + ${uncappedName}Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return ${uncappedName}Vector; + } + + + + + + public ListVector getList() { + if (listVector == null) { + int vectorCount = internalStruct.size(); + listVector = addOrGet(MinorType.LIST, ListVector.class); + if (internalStruct.size() > vectorCount) { + listVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return listVector; + } + + public MapVector getMap() { + if (mapVector == 
null) { + throw new IllegalArgumentException("No map present. Provide ArrowType argument to create a new vector"); + } + return mapVector; + } + + public MapVector getMap(ArrowType arrowType) { + return getMap(null, arrowType); + } + + public MapVector getMap(String name, ArrowType arrowType) { + if (mapVector == null) { + int vectorCount = internalStruct.size(); + mapVector = addOrGet(name, MinorType.MAP, arrowType, MapVector.class); + if (internalStruct.size() > vectorCount) { + mapVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return mapVector; + } + + public int getTypeValue(int index) { + return typeBuffer.getByte(index * TYPE_WIDTH); + } + + @Override + public void allocateNew() throws OutOfMemoryException { + /* new allocation -- clear the current buffers */ + clear(); + internalStruct.allocateNew(); + try { + allocateTypeBuffer(); + } catch (Exception e) { + clear(); + throw e; + } + } + + @Override + public boolean allocateNewSafe() { + /* new allocation -- clear the current buffers */ + clear(); + boolean safe = internalStruct.allocateNewSafe(); + if (!safe) { return false; } + try { + allocateTypeBuffer(); + } catch (Exception e) { + clear(); + return false; + } + + return true; + } + + private void allocateTypeBuffer() { + typeBuffer = allocator.buffer(typeBufferAllocationSizeInBytes); + typeBuffer.readerIndex(0); + typeBuffer.setZero(0, typeBuffer.capacity()); + } + + @Override + public void reAlloc() { + internalStruct.reAlloc(); + reallocTypeBuffer(); + } + + private void reallocTypeBuffer() { + final long currentBufferCapacity = typeBuffer.capacity(); + long newAllocationSize = currentBufferCapacity * 2; + if (newAllocationSize == 0) { + if (typeBufferAllocationSizeInBytes > 0) { + newAllocationSize = typeBufferAllocationSizeInBytes; + } else { + newAllocationSize = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH * 2; + } + } + newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize); + assert 
newAllocationSize >= 1; + + if (newAllocationSize > BaseValueVector.MAX_ALLOCATION_SIZE) { + throw new OversizedAllocationException("Unable to expand the buffer"); + } + + final ArrowBuf newBuf = allocator.buffer(checkedCastToInt(newAllocationSize)); + newBuf.setBytes(0, typeBuffer, 0, currentBufferCapacity); + newBuf.setZero(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity); + typeBuffer.getReferenceManager().release(1); + typeBuffer = newBuf; + typeBufferAllocationSizeInBytes = (int)newAllocationSize; + } + + @Override + public void setInitialCapacity(int numRecords) { } + + @Override + public int getValueCapacity() { + return Math.min(getTypeBufferValueCapacity(), internalStruct.getValueCapacity()); + } + + @Override + public void close() { + clear(); + } + + @Override + public void clear() { + valueCount = 0; + typeBuffer.getReferenceManager().release(); + typeBuffer = allocator.getEmpty(); + internalStruct.clear(); + } + + @Override + public void reset() { + valueCount = 0; + typeBuffer.setZero(0, typeBuffer.capacity()); + internalStruct.reset(); + } + + @Override + public Field getField() { + List childFields = new ArrayList<>(); + List children = internalStruct.getChildren(); + int[] typeIds = new int[children.size()]; + for (ValueVector v : children) { + typeIds[childFields.size()] = v.getMinorType().ordinal(); + childFields.add(v.getField()); + } + + FieldType fieldType; + if (this.fieldType == null) { + fieldType = FieldType.nullable(new ArrowType.Union(Sparse, typeIds)); + } else { + final UnionMode mode = ((ArrowType.Union)this.fieldType.getType()).getMode(); + fieldType = new FieldType(this.fieldType.isNullable(), new ArrowType.Union(mode, typeIds), + this.fieldType.getDictionary(), this.fieldType.getMetadata()); + } + + return new Field(name, fieldType, childFields); + } + + @Override + public TransferPair getTransferPair(BufferAllocator allocator) { + return getTransferPair(name, allocator); + } + + @Override + public TransferPair 
getTransferPair(String ref, BufferAllocator allocator) { + return getTransferPair(ref, allocator, null); + } + + @Override + public TransferPair getTransferPair(String ref, BufferAllocator allocator, CallBack callBack) { + return new org.apache.arrow.vector.complex.UnionVector.TransferImpl(ref, allocator, callBack); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator) { + return getTransferPair(field, allocator, null); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator, CallBack callBack) { + return new org.apache.arrow.vector.complex.UnionVector.TransferImpl(field, allocator, callBack); + } + + @Override + public TransferPair makeTransferPair(ValueVector target) { + return new TransferImpl((UnionVector) target); + } + + @Override + public void copyFrom(int inIndex, int outIndex, ValueVector from) { + Preconditions.checkArgument(this.getMinorType() == from.getMinorType()); + UnionVector fromCast = (UnionVector) from; + fromCast.getReader().setPosition(inIndex); + getWriter().setPosition(outIndex); + ComplexCopier.copy(fromCast.reader, writer); + } + + @Override + public void copyFromSafe(int inIndex, int outIndex, ValueVector from) { + copyFrom(inIndex, outIndex, from); + } + + public FieldVector addVector(FieldVector v) { + final String name = v.getName().isEmpty() ? fieldName(v.getMinorType()) : v.getName(); + Preconditions.checkState(internalStruct.getChild(name) == null, String.format("%s vector already exists", name)); + final FieldVector newVector = internalStruct.addOrGet(name, v.getField().getFieldType(), v.getClass()); + v.makeTransferPair(newVector).transfer(); + internalStruct.putChild(name, newVector); + if (callBack != null) { + callBack.doWork(); + } + return newVector; + } + + /** + * Directly put a vector to internalStruct without creating a new one with same type. 
+ */ + public void directAddVector(FieldVector v) { + String name = fieldName(v.getMinorType()); + Preconditions.checkState(internalStruct.getChild(name) == null, String.format("%s vector already exists", name)); + internalStruct.putChild(name, v); + if (callBack != null) { + callBack.doWork(); + } + } + + private class TransferImpl implements TransferPair { + private final TransferPair internalStructVectorTransferPair; + private final UnionVector to; + + public TransferImpl(String name, BufferAllocator allocator, CallBack callBack) { + to = new UnionVector(name, allocator, /* field type */ null, callBack); + internalStructVectorTransferPair = internalStruct.makeTransferPair(to.internalStruct); + } + + public TransferImpl(Field field, BufferAllocator allocator, CallBack callBack) { + to = new UnionVector(field.getName(), allocator, null, callBack); + internalStructVectorTransferPair = internalStruct.makeTransferPair(to.internalStruct); + } + + public TransferImpl(UnionVector to) { + this.to = to; + internalStructVectorTransferPair = internalStruct.makeTransferPair(to.internalStruct); + } + + @Override + public void transfer() { + to.clear(); + ReferenceManager refManager = typeBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(typeBuffer, to.allocator).getTransferredBuffer(); + internalStructVectorTransferPair.transfer(); + to.valueCount = valueCount; + clear(); + } + + @Override + public void splitAndTransfer(int startIndex, int length) { + Preconditions.checkArgument(startIndex >= 0 && length >= 0 && startIndex + length <= valueCount, + "Invalid parameters startIndex: %s, length: %s for valueCount: %s", startIndex, length, valueCount); + to.clear(); + + internalStructVectorTransferPair.splitAndTransfer(startIndex, length); + final int startPoint = startIndex * TYPE_WIDTH; + final int sliceLength = length * TYPE_WIDTH; + final ArrowBuf slicedBuffer = typeBuffer.slice(startPoint, sliceLength); + final ReferenceManager refManager = 
slicedBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(slicedBuffer, to.allocator).getTransferredBuffer(); + to.setValueCount(length); + } + + @Override + public ValueVector getTo() { + return to; + } + + @Override + public void copyValueSafe(int from, int to) { + this.to.copyFrom(from, to, UnionVector.this); + } + } + + @Override + public FieldReader getReader() { + if (reader == null) { + reader = new UnionReader(this); + } + return reader; + } + + public FieldWriter getWriter() { + if (writer == null) { + writer = new UnionWriter(this); + } + return writer; + } + + @Override + public int getBufferSize() { + if (valueCount == 0) { return 0; } + + return (valueCount * TYPE_WIDTH) + internalStruct.getBufferSize(); + } + + @Override + public int getBufferSizeFor(final int valueCount) { + if (valueCount == 0) { + return 0; + } + + long bufferSize = 0; + for (final ValueVector v : (Iterable) this) { + bufferSize += v.getBufferSizeFor(valueCount); + } + + return (int) bufferSize + (valueCount * TYPE_WIDTH); + } + + @Override + public ArrowBuf[] getBuffers(boolean clear) { + List list = new java.util.ArrayList<>(); + setReaderAndWriterIndex(); + if (getBufferSize() != 0) { + list.add(typeBuffer); + list.addAll(java.util.Arrays.asList(internalStruct.getBuffers(clear))); + } + if (clear) { + valueCount = 0; + typeBuffer.getReferenceManager().retain(); + typeBuffer.getReferenceManager().release(); + typeBuffer = allocator.getEmpty(); + } + return list.toArray(new ArrowBuf[list.size()]); + } + + @Override + public Iterator iterator() { + return internalStruct.iterator(); + } + + public ValueVector getVector(int index) { + return getVector(index, null); + } + + public ValueVector getVector(int index, ArrowType arrowType) { + int type = typeBuffer.getByte(index * TYPE_WIDTH); + return getVectorByType(type, arrowType); + } + + public ValueVector getVectorByType(int typeId) { + return getVectorByType(typeId, null); + } + + public ValueVector 
getVectorByType(int typeId, ArrowType arrowType) { + Field type = typeIds[typeId]; + Types.MinorType minorType; + String name = null; + if (type == null) { + minorType = Types.MinorType.values()[typeId]; + } else { + minorType = Types.getMinorTypeForArrowType(type.getType()); + name = type.getName(); + } + switch (minorType) { + case NULL: + return null; + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + case ${name?upper_case}: + return get${name}Vector(name<#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary">, arrowType); + + + + case STRUCT: + return getStruct(); + case LIST: + return getList(); + case MAP: + return getMap(name, arrowType); + default: + throw new UnsupportedOperationException("Cannot support type: " + MinorType.values()[typeId]); + } + } + + public Object getObject(int index) { + ValueVector vector = getVector(index); + if (vector != null) { + return vector.isNull(index) ? null : vector.getObject(index); + } + return null; + } + + public byte[] get(int index) { + return null; + } + + public void get(int index, ComplexHolder holder) { + } + + public void get(int index, UnionHolder holder) { + FieldReader reader = new UnionReader(UnionVector.this); + reader.setPosition(index); + holder.reader = reader; + } + + public int getValueCount() { + return valueCount; + } + + /** + * IMPORTANT: Union types always return non null as there is no validity buffer. + * + * To check validity correctly you must check the underlying vector. 
+ */ + public boolean isNull(int index) { + return false; + } + + @Override + public int getNullCount() { + return 0; + } + + public int isSet(int index) { + return isNull(index) ? 0 : 1; + } + + UnionWriter writer; + + public void setValueCount(int valueCount) { + this.valueCount = valueCount; + while (valueCount > getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + } + internalStruct.setValueCount(valueCount); + } + + public void setSafe(int index, UnionHolder holder) { + setSafe(index, holder, null); + } + + public void setSafe(int index, UnionHolder holder, ArrowType arrowType) { + FieldReader reader = holder.reader; + if (writer == null) { + writer = new UnionWriter(UnionVector.this); + } + writer.setPosition(index); + MinorType type = reader.getMinorType(); + switch (type) { + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + case ${name?upper_case}: + Nullable${name}Holder ${uncappedName}Holder = new Nullable${name}Holder(); + reader.read(${uncappedName}Holder); + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + setSafe(index, ${uncappedName}Holder, arrowType); + <#else> + setSafe(index, ${uncappedName}Holder); + + break; + + + + case STRUCT: { + ComplexCopier.copy(reader, writer); + break; + } + case LIST: { + ComplexCopier.copy(reader, writer); + break; + } + default: + throw new UnsupportedOperationException(); + } + } + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + public void setSafe(int index, Nullable${name}Holder holder, ArrowType arrowType) { + setType(index, MinorType.${name?upper_case}); + get${name}Vector(null, arrowType).setSafe(index, holder); + } + <#else> + public void setSafe(int index, Nullable${name}Holder holder) { + setType(index, MinorType.${name?upper_case}); + get${name}Vector(null).setSafe(index, holder); + } + + + + + + public void setType(int index, MinorType type) { + while (index >= getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + } + typeBuffer.setByte(index * TYPE_WIDTH , (byte) type.ordinal()); + } + + private int getTypeBufferValueCapacity() { + return capAtMaxInt(typeBuffer.capacity() / TYPE_WIDTH); + } + + @Override + public int hashCode(int index) { + return hashCode(index, null); + } + + @Override + public int hashCode(int index, ArrowBufHasher hasher) { + ValueVector vec = getVector(index); + if (vec == null) { + return ArrowBufPointer.NULL_HASH_CODE; + } + return vec.hashCode(index, hasher); + } + + @Override + public OUT accept(VectorVisitor visitor, IN value) { + return visitor.visit(this, value); + } + + @Override + public String getName() { + return name; + } + + @Override + public String toString() { + return ValueVectorUtility.getToString(this, 0, getValueCount()); + } + + @Override + public T addOrGet(String name, FieldType fieldType, Class clazz) { + return internalStruct.addOrGet(name, fieldType, clazz); + } + + @Override + public T getChild(String name, Class clazz) { + return internalStruct.getChild(name, clazz); + } + + @Override + public VectorWithOrdinal getChildVectorWithOrdinal(String name) { + return internalStruct.getChildVectorWithOrdinal(name); + } + + @Override + public int size() { + 
return internalStruct.size(); + } + + @Override + public void setInitialCapacity(int valueCount, double density) { + for (final ValueVector vector : internalStruct) { + if (vector instanceof DensityAwareVector) { + ((DensityAwareVector) vector).setInitialCapacity(valueCount, density); + } else { + vector.setInitialCapacity(valueCount); + } + } + } + + /** + * Set the element at the given index to null. For UnionVector, it throws an UnsupportedOperationException + * as nulls are not supported at the top level and isNull() always returns false. + * + * @param index position of element + * @throws UnsupportedOperationException whenever invoked + */ + @Override + public void setNull(int index) { + throw new UnsupportedOperationException("The method setNull() is not supported on UnionVector."); + } +} diff --git a/java/vector/target/classes/codegen/templates/UnionWriter.java b/java/vector/target/classes/codegen/templates/UnionWriter.java new file mode 100644 index 000000000000..08dbf24324b1 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/UnionWriter.java @@ -0,0 +1,459 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.complex.impl.NullableStructWriterFactory; +import org.apache.arrow.vector.types.Types; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/UnionWriter.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> +import org.apache.arrow.vector.complex.writer.BaseWriter; +import org.apache.arrow.vector.types.Types.MinorType; + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ +@SuppressWarnings("unused") +public class UnionWriter extends AbstractFieldWriter implements FieldWriter { + + UnionVector data; + private StructWriter structWriter; + private UnionListWriter listWriter; + private UnionMapWriter mapWriter; + private List writers = new java.util.ArrayList<>(); + private final NullableStructWriterFactory nullableStructWriterFactory; + + public UnionWriter(UnionVector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public UnionWriter(UnionVector vector, NullableStructWriterFactory nullableStructWriterFactory) { + data = vector; + this.nullableStructWriterFactory = nullableStructWriterFactory; + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + for (BaseWriter writer : writers) { + writer.setPosition(index); + } + } + + + @Override + public void start() { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().start(); + } + + @Override + public void end() { + getStructWriter().end(); + } + + @Override + public void startList() { + getListWriter().startList(); + data.setType(idx(), MinorType.LIST); + } + + @Override + public void endList() { + getListWriter().endList(); + } + + @Override + public void startMap() { + 
getMapWriter().startMap(); + data.setType(idx(), MinorType.MAP); + } + + @Override + public void endMap() { + getMapWriter().endMap(); + } + + @Override + public void startEntry() { + getMapWriter().startEntry(); + } + + @Override + public MapWriter key() { + return getMapWriter().key(); + } + + @Override + public MapWriter value() { + return getMapWriter().value(); + } + + @Override + public void endEntry() { + getMapWriter().endEntry(); + } + + private StructWriter getStructWriter() { + if (structWriter == null) { + structWriter = nullableStructWriterFactory.build(data.getStruct()); + structWriter.setPosition(idx()); + writers.add(structWriter); + } + return structWriter; + } + + public StructWriter asStruct() { + data.setType(idx(), MinorType.STRUCT); + return getStructWriter(); + } + + private ListWriter getListWriter() { + if (listWriter == null) { + listWriter = new UnionListWriter(data.getList(), nullableStructWriterFactory); + listWriter.setPosition(idx()); + writers.add(listWriter); + } + return listWriter; + } + + public ListWriter asList() { + data.setType(idx(), MinorType.LIST); + return getListWriter(); + } + + private MapWriter getMapWriter() { + if (mapWriter == null) { + mapWriter = new UnionMapWriter(data.getMap(new ArrowType.Map(false))); + mapWriter.setPosition(idx()); + writers.add(mapWriter); + } + return mapWriter; + } + + private MapWriter getMapWriter(ArrowType arrowType) { + if (mapWriter == null) { + mapWriter = new UnionMapWriter(data.getMap(arrowType)); + mapWriter.setPosition(idx()); + writers.add(mapWriter); + } + return mapWriter; + } + + public MapWriter asMap(ArrowType arrowType) { + data.setType(idx(), MinorType.MAP); + return getMapWriter(arrowType); + } + + BaseWriter getWriter(MinorType minorType) { + return getWriter(minorType, null); + } + + BaseWriter getWriter(MinorType minorType, ArrowType arrowType) { + switch (minorType) { + case STRUCT: + return getStructWriter(); + case LIST: + return getListWriter(); + case MAP: + 
return getMapWriter(arrowType); + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + case ${name?upper_case}: + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + return get${name}Writer(arrowType); + <#else> + return get${name}Writer(); + + + + + default: + throw new UnsupportedOperationException("Unknown type: " + minorType); + } + } + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + + private ${name}Writer ${name?uncap_first}Writer; + + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + private ${name}Writer get${name}Writer(ArrowType arrowType) { + if (${uncappedName}Writer == null) { + ${uncappedName}Writer = new ${name}WriterImpl(data.get${name}Vector(arrowType)); + ${uncappedName}Writer.setPosition(idx()); + writers.add(${uncappedName}Writer); + } + return ${uncappedName}Writer; + } + + public ${name}Writer as${name}(ArrowType arrowType) { + data.setType(idx(), MinorType.${name?upper_case}); + return get${name}Writer(arrowType); + } + <#else> + private ${name}Writer get${name}Writer() { + if (${uncappedName}Writer == null) { + ${uncappedName}Writer = new ${name}WriterImpl(data.get${name}Vector()); + ${uncappedName}Writer.setPosition(idx()); + writers.add(${uncappedName}Writer); + } + return ${uncappedName}Writer; + } + + public ${name}Writer as${name}() { + data.setType(idx(), MinorType.${name?upper_case}); + return get${name}Writer(); + } + + + @Override + public void write(${name}Holder holder) { + data.setType(idx(), MinorType.${name?upper_case}); + <#if minor.class?starts_with("Decimal")> + ArrowType arrowType = new ArrowType.Decimal(holder.precision, holder.scale, ${name}Holder.WIDTH * 8); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write${name}(<#list fields as field>holder.${field.name}<#if field_has_next>, , arrowType); + <#elseif is_timestamp_tz(minor.class)> + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.${name?upper_case?remove_ending("TZ")}.getType(); + ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write(holder); + 
<#elseif minor.class == "Duration"> + ArrowType arrowType = new ArrowType.Duration(holder.unit); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write(holder); + <#elseif minor.class == "FixedSizeBinary"> + ArrowType arrowType = new ArrowType.FixedSizeBinary(holder.byteWidth); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write(holder); + <#else> + get${name}Writer().setPosition(idx()); + get${name}Writer().write${name}(<#list fields as field>holder.${field.name}<#if field_has_next>, ); + + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, <#if minor.class?starts_with("Decimal")>, ArrowType arrowType) { + data.setType(idx(), MinorType.${name?upper_case}); + <#if minor.class?starts_with("Decimal")> + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write${name}(<#list fields as field>${field.name}<#if field_has_next>, , arrowType); + <#elseif is_timestamp_tz(minor.class)> + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.${name?upper_case?remove_ending("TZ")}.getType(); + ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC"); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); + <#elseif minor.class == "Duration" || minor.class == "FixedSizeBinary"> + // This is expected to throw. There's nothing more that we can do here since we can't infer any + // sort of default unit for the Duration or a default width for the FixedSizeBinary types. 
+ ArrowType arrowType = MinorType.${name?upper_case}.getType(); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); + <#else> + get${name}Writer().setPosition(idx()); + get${name}Writer().write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); + + } + <#if minor.class?starts_with("Decimal")> + public void write${name}(${friendlyType} value) { + data.setType(idx(), MinorType.${name?upper_case}); + ArrowType arrowType = new ArrowType.Decimal(value.precision(), value.scale(), ${name}Vector.TYPE_WIDTH * 8); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write${name}(value); + } + + public void writeBigEndianBytesTo${name}(byte[] value, ArrowType arrowType) { + data.setType(idx(), MinorType.${name?upper_case}); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).writeBigEndianBytesTo${name}(value, arrowType); + } + <#elseif minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value, offset, length); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value, offset, length); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(${friendlyType} value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } + + @Override + public 
void write${minor.class}(String value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } + + + + + + public void writeNull() { + } + + @Override + public StructWriter struct() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().struct(); + } + + @Override + public ListWriter list() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().list(); + } + + @Override + public ListWriter list(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().list(name); + } + + @Override + public StructWriter struct(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().struct(name); + } + + @Override + public MapWriter map() { + data.setType(idx(), MinorType.MAP); + getListWriter().setPosition(idx()); + return getListWriter().map(); + } + + @Override + public MapWriter map(boolean keysSorted) { + data.setType(idx(), MinorType.MAP); + getListWriter().setPosition(idx()); + return getListWriter().map(keysSorted); + } + + @Override + public MapWriter map(String name) { + data.setType(idx(), MinorType.MAP); + getStructWriter().setPosition(idx()); + return getStructWriter().map(name); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + data.setType(idx(), MinorType.MAP); + getStructWriter().setPosition(idx()); + return getStructWriter().map(name, keysSorted); + } + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + @Override + public ${capName}Writer ${lowerName}(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().${lowerName}(name); + } + + @Override + public ${capName}Writer ${lowerName}() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().${lowerName}(); + } + + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + @Override + public ${capName}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().${lowerName}(name<#list minor.typeParams as typeParam>, ${typeParam.name}); + } + + + + @Override + public void allocate() { + data.allocateNew(); + } + + @Override + public void clear() { + data.clear(); + } + + @Override + public void close() throws Exception { + data.close(); + } + + @Override + public Field getField() { + return data.getField(); + } + + @Override + public int getValueCapacity() { + return data.getValueCapacity(); + } +} diff --git a/java/vector/target/classes/codegen/templates/ValueHolders.java b/java/vector/target/classes/codegen/templates/ValueHolders.java new file mode 100644 index 000000000000..973efd870a66 --- /dev/null +++ b/java/vector/target/classes/codegen/templates/ValueHolders.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +<@pp.dropOutputFile /> +<#list vv.modes as mode> +<#list vv.types as type> +<#list type.minor as minor> + +<#assign className="${mode.prefix}${minor.class}Holder" /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/holders/${className}.java" /> + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.holders; + +<#include "/@includes/vv_imports.ftl" /> + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +public final class ${className} implements ValueHolder{ + + <#if mode.name == "Repeated"> + + /** The first index (inclusive) into the Vector. **/ + public int start; + + /** The last index (exclusive) into the Vector. **/ + public int end; + + /** The Vector holding the actual values. **/ + public ${minor.class}Vector vector; + + <#else> + public static final int WIDTH = ${type.width}; + + <#if mode.name == "Optional">public int isSet; + <#else>public final int isSet = 1; + <#assign fields = (minor.fields!type.fields) + (minor.typeParams![]) /> + <#list fields as field> + public ${field.type} ${field.name}; + + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + + +} + + + + \ No newline at end of file diff --git a/java/vector/target/codegen/config.fmpp b/java/vector/target/codegen/config.fmpp new file mode 100644 index 000000000000..ef5a5072a75a --- /dev/null +++ b/java/vector/target/codegen/config.fmpp @@ -0,0 +1,24 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +data: { + # TODO: Rename to ~valueVectorModesAndTypes for clarity. + vv: tdd(../data/ValueVectorTypes.tdd), + arrowTypes: tdd(../data/ArrowTypes.tdd) + +} +freemarkerLinks: { + includes: includes/ +} diff --git a/java/vector/target/codegen/data/ArrowTypes.tdd b/java/vector/target/codegen/data/ArrowTypes.tdd new file mode 100644 index 000000000000..3cf9a968791a --- /dev/null +++ b/java/vector/target/codegen/data/ArrowTypes.tdd @@ -0,0 +1,124 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +{ + types: [ + { + name: "Null", + fields: [], + complex: false + }, + { + name: "Struct_", + fields: [], + complex: true + }, + { + name: "List", + fields: [], + complex: true + }, + { + name: "LargeList", + fields: [], + complex: true + }, + { + name: "FixedSizeList", + fields: [{name: "listSize", type: int}], + complex: true + }, + { + name: "Union", + fields: [{name: "mode", type: short, valueType: UnionMode}, {name: "typeIds", type: "int[]"}], + complex: true + }, + { + name: "Map", + fields: [{name: "keysSorted", type: boolean}], + complex: true + }, + { + name: "Int", + fields: [{name: "bitWidth", type: int}, {name: "isSigned", type: boolean}], + complex: false + }, + { + name: "FloatingPoint", + fields: [{name: precision, type: short, valueType: FloatingPointPrecision}], + complex: false + }, + { + name: "Utf8", + fields: [], + complex: false + }, + { + name: "LargeUtf8", + fields: [], + complex: false + }, + { + name: "Binary", + fields: [], + complex: false + }, + { + name: "LargeBinary", + fields: [], + complex: false + }, + { + name: "FixedSizeBinary", + fields: [{name: "byteWidth", type: int}], + complex: false + } + { + name: "Bool", + fields: [], + complex: false + }, + { + name: "Decimal", + fields: [{name: "precision", type: int}, {name: "scale", type: int}, {name: 
"bitWidth", type: int}], + complex: false + }, + { + name: "Date", + fields: [{name: "unit", type: short, valueType: DateUnit}] + complex: false + }, + { + name: "Time", + fields: [{name: "unit", type: short, valueType: TimeUnit}, {name: "bitWidth", type: int}], + complex: false + }, + { + name: "Timestamp", + fields: [{name: "unit", type: short, valueType: TimeUnit}, {name: "timezone", type: String}] + complex: false + }, + { + name: "Interval", + fields: [{name: "unit", type: short, valueType: IntervalUnit}], + complex: false + }, + { + name: "Duration", + fields: [{name: "unit", type: short, valueType: TimeUnit}], + complex: false + } + ] +} diff --git a/java/vector/target/codegen/data/ValueVectorTypes.tdd b/java/vector/target/codegen/data/ValueVectorTypes.tdd new file mode 100644 index 000000000000..6c2a96771245 --- /dev/null +++ b/java/vector/target/codegen/data/ValueVectorTypes.tdd @@ -0,0 +1,216 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +{ + modes: [ + {name: "Optional", prefix: "Nullable"}, + {name: "Required", prefix: ""} + ], + types: [ + { + major: "Fixed", + width: 1, + javaType: "byte", + boxedType: "Byte", + fields: [{name: "value", type: "byte"}], + minor: [ + { class: "TinyInt", valueHolder: "IntHolder" }, + { class: "UInt1", valueHolder: "UInt1Holder" } + ] + }, + { + major: "Fixed", + width: 2, + javaType: "char", + boxedType: "Character", + fields: [{name: "value", type: "char"}], + minor: [ + { class: "UInt2", valueHolder: "UInt2Holder"} + ] + }, { + major: "Fixed", + width: 2, + javaType: "short", + boxedType: "Short", + fields: [{name: "value", type: "short"}], + minor: [ + { class: "SmallInt", valueHolder: "Int2Holder"}, + ] + }, + { + major: "Fixed", + width: 2, + javaType: "short", + boxedType: "Short", + fields: [{name: "value", type: "short"}], + minor: [ + { class: "Float2", valueHolder: "Int2Holder"}, + ] + }, + { + major: "Fixed", + width: 4, + javaType: "int", + boxedType: "Integer", + fields: [{name: "value", type: "int"}], + minor: [ + { class: "Int", valueHolder: "IntHolder"}, + { class: "UInt4", valueHolder: "UInt4Holder" }, + { class: "Float4", javaType: "float" , boxedType: "Float", fields: [{name: "value", type: "float"}]}, + { class: "DateDay" }, + { class: "IntervalYear", javaType: "int", friendlyType: "Period" }, + { class: "TimeSec" }, + { class: "TimeMilli", javaType: "int", friendlyType: "LocalDateTime" } + ] + }, + { + major: "Fixed", + width: 8, + javaType: "long", + boxedType: "Long", + fields: [{name: "value", type: "long"}], + minor: [ + { class: "BigInt"}, + { class: "UInt8" }, + { class: "Float8", javaType: "double", boxedType: "Double", fields: [{name: "value", type: "double"}] }, + { class: "DateMilli", javaType: "long", friendlyType: "LocalDateTime" }, + { class: "Duration", javaType: "long", friendlyType: "Duration", + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Duration", + typeParams: [ {name: "unit", type: 
"org.apache.arrow.vector.types.TimeUnit"} ], + arrowTypeConstructorParams: ["unit"]} + { class: "TimeStampSec", javaType: "long", boxedType: "Long", friendlyType: "LocalDateTime" }, + { class: "TimeStampMilli", javaType: "long", boxedType: "Long", friendlyType: "LocalDateTime" }, + { class: "TimeStampMicro", javaType: "long", boxedType: "Long", friendlyType: "LocalDateTime" }, + { class: "TimeStampNano", javaType: "long", boxedType: "Long", friendlyType: "LocalDateTime" }, + { class: "TimeStampSecTZ", javaType: "long", boxedType: "Long", + typeParams: [ {name: "timezone", type: "String"} ], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Timestamp", + arrowTypeConstructorParams: ["org.apache.arrow.vector.types.TimeUnit.SECOND", "timezone"] }, + { class: "TimeStampMilliTZ", javaType: "long", boxedType: "Long", + typeParams: [ {name: "timezone", type: "String"} ], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Timestamp", + arrowTypeConstructorParams: ["org.apache.arrow.vector.types.TimeUnit.MILLISECOND", "timezone"] }, + { class: "TimeStampMicroTZ", javaType: "long", boxedType: "Long", + typeParams: [ {name: "timezone", type: "String"} ], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Timestamp", + arrowTypeConstructorParams: ["org.apache.arrow.vector.types.TimeUnit.MICROSECOND", "timezone"] }, + { class: "TimeStampNanoTZ", javaType: "long", boxedType: "Long", + typeParams: [ {name: "timezone", type: "String"} ], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Timestamp", + arrowTypeConstructorParams: ["org.apache.arrow.vector.types.TimeUnit.NANOSECOND", "timezone"] }, + { class: "TimeMicro" }, + { class: "TimeNano" } + ] + }, + { + major: "Fixed", + width: 8, + javaType: "ArrowBuf", + boxedType: "ArrowBuf", + minor: [ + { class: "IntervalDay", millisecondsOffset: 4, friendlyType: "Duration", fields: [ {name: "days", type:"int"}, {name: "milliseconds", type:"int"}] } + ] + }, + { + major: "Fixed", + width: 16, + 
javaType: "ArrowBuf", + boxedType: "ArrowBuf", + minor: [ + { class: "IntervalMonthDayNano", daysOffset: 4, nanosecondsOffset: 8, friendlyType: "PeriodDuration", fields: [ {name: "months", type:"int"}, {name: "days", type:"int"}, {name: "nanoseconds", type:"long"}] } + ] + }, + + { + major: "Fixed", + width: 32, + javaType: "ArrowBuf", + boxedType: "ArrowBuf", + + minor: [ + { + class: "Decimal256", + maxPrecisionDigits: 76, nDecimalDigits: 4, friendlyType: "BigDecimal", + typeParams: [ {name: "scale", type: "int"}, { name: "precision", type: "int"}], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Decimal", + fields: [{name: "start", type: "long"}, {name: "buffer", type: "ArrowBuf"}] + } + ] + }, + { + major: "Fixed", + width: 16, + javaType: "ArrowBuf", + boxedType: "ArrowBuf", + + minor: [ + { + class: "Decimal", + maxPrecisionDigits: 38, nDecimalDigits: 4, friendlyType: "BigDecimal", + typeParams: [ {name: "scale", type: "int"}, { name: "precision", type: "int"}], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.Decimal", + fields: [{name: "start", type: "long"}, {name: "buffer", type: "ArrowBuf"}] + } + ] + }, + + { + major: "Fixed", + width: -1, + javaType: "byte[]", + boxedType: "ArrowBuf", + minor: [ + { + class: "FixedSizeBinary", + typeParams: [ {name: "byteWidth", type: "int"} ], + arrowType: "org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeBinary", + friendlyType: "byte[]", + fields: [{name: "buffer", type: "ArrowBuf"}], + } + ] + }, + { + major: "VarLen", + width: 4, + javaType: "int", + boxedType: "ArrowBuf", + fields: [{name: "start", type: "int"}, {name: "end", type: "int"}, {name: "buffer", type: "ArrowBuf"}], + minor: [ + { class: "VarBinary" , friendlyType: "byte[]" }, + { class: "VarChar" , friendlyType: "Text" } + ] + }, + { + major: "VarLen", + width: 8, + javaType: "long", + boxedType: "ArrowBuf", + fields: [{name: "start", type: "long"}, {name: "end", type: "long"}, {name: "buffer", type: "ArrowBuf"}], + minor: [ + 
{ class: "LargeVarChar" , friendlyType: "Text" } + { class: "LargeVarBinary" , friendlyType: "byte[]" } + ] + }, + { + major: "Bit", + width: 1, + javaType: "int", + boxedType: "Integer", + minor: [ + { class: "Bit" , friendlyType: "Boolean", fields: [{name: "value", type: "int"}] } + ] + } + ] +} diff --git a/java/vector/target/codegen/includes/license.ftl b/java/vector/target/codegen/includes/license.ftl new file mode 100644 index 000000000000..c6a5afeef509 --- /dev/null +++ b/java/vector/target/codegen/includes/license.ftl @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ \ No newline at end of file diff --git a/java/vector/target/codegen/includes/vv_imports.ftl b/java/vector/target/codegen/includes/vv_imports.ftl new file mode 100644 index 000000000000..f4c72a1a6cba --- /dev/null +++ b/java/vector/target/codegen/includes/vv_imports.ftl @@ -0,0 +1,58 @@ +<#-- + ~ Licensed to the Apache Software Foundation (ASF) under one or more + ~ contributor license agreements. See the NOTICE file distributed with + ~ this work for additional information regarding copyright ownership. 
+ ~ The ASF licenses this file to You under the Apache License, Version 2.0 + ~ (the "License"); you may not use this file except in compliance with + ~ the License. You may obtain a copy of the License at + ~ + ~ http://www.apache.org/licenses/LICENSE-2.0 + ~ + ~ Unless required by applicable law or agreed to in writing, software + ~ distributed under the License is distributed on an "AS IS" BASIS, + ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ~ See the License for the specific language governing permissions and + ~ limitations under the License. + --> + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; 
+import java.time.Period; +import java.time.ZonedDateTime; + + diff --git a/java/vector/target/codegen/templates/AbstractFieldReader.java b/java/vector/target/codegen/templates/AbstractFieldReader.java new file mode 100644 index 000000000000..e3c8729469c7 --- /dev/null +++ b/java/vector/target/codegen/templates/AbstractFieldReader.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/AbstractFieldReader.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +abstract class AbstractFieldReader extends AbstractBaseReader implements FieldReader{ + + AbstractFieldReader(){ + super(); + } + + /** + * Returns true if the current value of the reader is not null + * @return whether the current value is set + */ + public boolean isSet() { + return true; + } + + @Override + public Field getField() { + fail("getField"); + return null; + } + + <#list ["Object", "BigDecimal", "Short", "Integer", "Long", "Boolean", + "LocalDateTime", "Duration", "Period", "Double", "Float", + "Character", "Text", "String", "Byte", "byte[]", "PeriodDuration"] as friendlyType> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + public ${friendlyType} read${safeType}(int arrayIndex) { + fail("read${safeType}(int arrayIndex)"); + return null; + } + + public ${friendlyType} read${safeType}() { + fail("read${safeType}()"); + return null; + } + + + public void copyAsValue(StructWriter writer) { + fail("CopyAsValue StructWriter"); + } + + public void copyAsField(String name, StructWriter writer) { + fail("CopyAsField StructWriter"); + } + + public void copyAsField(String name, ListWriter writer) { + fail("CopyAsFieldList"); + } + + public void copyAsField(String name, MapWriter writer) { + fail("CopyAsFieldMap"); + } + + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign boxedType = (minor.boxedType!type.boxedType) /> + public void read(${name}Holder holder) { + fail("${name}"); + } + + public void read(Nullable${name}Holder holder) { + fail("${name}"); + } + + public void read(int arrayIndex, 
${name}Holder holder) { + fail("Repeated${name}"); + } + + public void read(int arrayIndex, Nullable${name}Holder holder) { + fail("Repeated${name}"); + } + + public void copyAsValue(${name}Writer writer) { + fail("CopyAsValue${name}"); + } + + public void copyAsField(String name, ${name}Writer writer) { + fail("CopyAsField${name}"); + } + + + public FieldReader reader(String name) { + fail("reader(String name)"); + return null; + } + + public FieldReader reader() { + fail("reader()"); + return null; + } + + public int size() { + fail("size()"); + return -1; + } + + private void fail(String name) { + throw new IllegalArgumentException(String.format("You tried to read a [%s] type when you are using a field reader of type [%s].", name, this.getClass().getSimpleName())); + } +} + + + diff --git a/java/vector/target/codegen/templates/AbstractFieldWriter.java b/java/vector/target/codegen/templates/AbstractFieldWriter.java new file mode 100644 index 000000000000..6c2368117f7c --- /dev/null +++ b/java/vector/target/codegen/templates/AbstractFieldWriter.java @@ -0,0 +1,261 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/AbstractFieldWriter.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/* + * This class is generated using freemarker and the ${.template_name} template. + * Note that changes to the AbstractFieldWriter template should also get reflected in the + * AbstractPromotableFieldWriter, ComplexWriters, UnionFixedSizeListWriter, UnionListWriter + * and UnionWriter templates and the PromotableWriter concrete code. + */ +@SuppressWarnings("unused") +abstract class AbstractFieldWriter extends AbstractBaseWriter implements FieldWriter { + + protected boolean addVectorAsNullable = true; + + /** + * Set flag to control the FieldType.nullable property when a writer creates a new vector. + * If true then vectors created will be nullable, this is the default behavior. If false then + * vectors created will be non-nullable. 
+ * + * @param nullable Whether or not to create nullable vectors (default behavior is true) + */ + public void setAddVectorAsNullable(boolean nullable) { + addVectorAsNullable = nullable; + } + + @Override + public void start() { + throw new IllegalStateException(String.format("You tried to start when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void end() { + throw new IllegalStateException(String.format("You tried to end when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void startList() { + throw new IllegalStateException(String.format("You tried to start a list when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void endList() { + throw new IllegalStateException(String.format("You tried to end a list when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void startMap() { + throw new IllegalStateException(String.format("You tried to start a map when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void endMap() { + throw new IllegalStateException(String.format("You tried to end a map when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void startEntry() { + throw new IllegalStateException(String.format("You tried to start a map entry when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public MapWriter key() { + throw new IllegalStateException(String.format("You tried to start a map key when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public MapWriter value() { + throw new IllegalStateException(String.format("You tried to start a map value when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + 
@Override + public void endEntry() { + throw new IllegalStateException(String.format("You tried to end a map entry when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> + @Override + public void write(${name}Holder holder) { + fail("${name}"); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + fail("${name}"); + } + + <#if minor.class?starts_with("Decimal")> + public void write${minor.class}(${friendlyType} value) { + fail("${name}"); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, , ArrowType arrowType) { + fail("${name}"); + } + + public void writeBigEndianBytesTo${minor.class}(byte[] value) { + fail("${name}"); + } + + public void writeBigEndianBytesTo${minor.class}(byte[] value, ArrowType arrowType) { + fail("${name}"); + } + + + <#if minor.class?ends_with("VarBinary")> + public void write${minor.class}(byte[] value) { + fail("${name}"); + } + + public void write${minor.class}(byte[] value, int offset, int length) { + fail("${name}"); + } + + public void write${minor.class}(ByteBuffer value) { + fail("${name}"); + } + + public void write${minor.class}(ByteBuffer value, int offset, int length) { + fail("${name}"); + } + + + <#if minor.class?ends_with("VarChar")> + public void write${minor.class}(${friendlyType} value) { + fail("${name}"); + } + + public void write${minor.class}(String value) { + fail("${name}"); + } + + + + + public void writeNull() { + fail("${name}"); + } + + /** + * This implementation returns {@code false}. + *

+ * Must be overridden by struct writers. + *

+ */ + @Override + public boolean isEmptyStruct() { + return false; + } + + @Override + public StructWriter struct() { + fail("Struct"); + return null; + } + + @Override + public ListWriter list() { + fail("List"); + return null; + } + + @Override + public MapWriter map() { + fail("Map"); + return null; + } + + @Override + public StructWriter struct(String name) { + fail("Struct"); + return null; + } + + @Override + public ListWriter list(String name) { + fail("List"); + return null; + } + + @Override + public MapWriter map(String name) { + fail("Map"); + return null; + } + + @Override + public MapWriter map(boolean keysSorted) { + fail("Map"); + return null; + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + fail("Map"); + return null; + } + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#if minor.typeParams?? 
> + + @Override + public ${capName}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + fail("${capName}(" + <#list minor.typeParams as typeParam>"${typeParam.name}: " + ${typeParam.name} + ", " + ")"); + return null; + } + + + @Override + public ${capName}Writer ${lowerName}(String name) { + fail("${capName}"); + return null; + } + + @Override + public ${capName}Writer ${lowerName}() { + fail("${capName}"); + return null; + } + + + + public void copyReader(FieldReader reader) { + fail("Copy FieldReader"); + } + + public void copyReaderToField(String name, FieldReader reader) { + fail("Copy FieldReader to STring"); + } + + private void fail(String name) { + throw new IllegalArgumentException(String.format("You tried to write a %s type when you are using a ValueWriter of type %s.", name, this.getClass().getSimpleName())); + } +} diff --git a/java/vector/target/codegen/templates/AbstractPromotableFieldWriter.java b/java/vector/target/codegen/templates/AbstractPromotableFieldWriter.java new file mode 100644 index 000000000000..59f9fb5b8098 --- /dev/null +++ b/java/vector/target/codegen/templates/AbstractPromotableFieldWriter.java @@ -0,0 +1,331 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/AbstractPromotableFieldWriter.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/* + * A FieldWriter which delegates calls to another FieldWriter. The delegate FieldWriter can be promoted to a new type + * when necessary. Classes that extend this class are responsible for handling promotion. + * + * This class is generated using freemarker and the ${.template_name} template. + * + */ +@SuppressWarnings("unused") +abstract class AbstractPromotableFieldWriter extends AbstractFieldWriter { + /** + * Retrieve the FieldWriter, promoting if it is not a FieldWriter of the specified type + * @param type the type of the values we want to write + * @return the corresponding field writer + */ + protected FieldWriter getWriter(MinorType type) { + return getWriter(type, null); + } + + abstract protected FieldWriter getWriter(MinorType type, ArrowType arrowType); + + /** + * @return the current FieldWriter + */ + abstract protected FieldWriter getWriter(); + + @Override + public void start() { + getWriter(MinorType.STRUCT).start(); + } + + @Override + public void end() { + getWriter(MinorType.STRUCT).end(); + setPosition(idx() + 1); + } + + @Override + public void startList() { + getWriter(MinorType.LIST).startList(); + } + + @Override + public void endList() { + getWriter(MinorType.LIST).endList(); + setPosition(idx() + 1); + } + + @Override + public void startMap() { + getWriter(MinorType.MAP).startMap(); + } + + @Override + public void endMap() { + getWriter(MinorType.MAP).endMap(); + setPosition(idx() + 1); + } + + @Override + public void startEntry() { + 
getWriter(MinorType.MAP).startEntry(); + } + + @Override + public MapWriter key() { + return getWriter(MinorType.MAP).key(); + } + + @Override + public MapWriter value() { + return getWriter(MinorType.MAP).value(); + } + + @Override + public void endEntry() { + getWriter(MinorType.MAP).endEntry(); + } + + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#if minor.class == "Decimal"> + @Override + public void write(DecimalHolder holder) { + getWriter(MinorType.DECIMAL).write(holder); + } + + public void writeDecimal(int start, ArrowBuf buffer, ArrowType arrowType) { + getWriter(MinorType.DECIMAL).writeDecimal(start, buffer, arrowType); + } + + public void writeDecimal(int start, ArrowBuf buffer) { + getWriter(MinorType.DECIMAL).writeDecimal(start, buffer); + } + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType) { + getWriter(MinorType.DECIMAL).writeBigEndianBytesToDecimal(value, arrowType); + } + + public void writeBigEndianBytesToDecimal(byte[] value) { + getWriter(MinorType.DECIMAL).writeBigEndianBytesToDecimal(value); + } + <#elseif minor.class == "Decimal256"> + @Override + public void write(Decimal256Holder holder) { + getWriter(MinorType.DECIMAL256).write(holder); + } + + public void writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType) { + getWriter(MinorType.DECIMAL256).writeDecimal256(start, buffer, arrowType); + } + + public void writeDecimal256(long start, ArrowBuf buffer) { + getWriter(MinorType.DECIMAL256).writeDecimal256(start, buffer); + } + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType) { + getWriter(MinorType.DECIMAL256).writeBigEndianBytesToDecimal256(value, arrowType); + } + + public void writeBigEndianBytesToDecimal256(byte[] value) { + getWriter(MinorType.DECIMAL256).writeBigEndianBytesToDecimal256(value); + } + <#elseif is_timestamp_tz(minor.class)> + @Override + public void 
write(${name}Holder holder) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.${name?upper_case?remove_ending("TZ")}.getType(); + // Take the holder.timezone similar to how PromotableWriter.java:write(DecimalHolder) takes the scale from the holder. + ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone); + getWriter(MinorType.${name?upper_case}, arrowType).write(holder); + } + + /** + * @deprecated + * The holder version should be used instead otherwise the timezone will default to UTC. + * @see #write(${name}Holder) + */ + @Deprecated + @Override + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.${name?upper_case?remove_ending("TZ")}.getType(); + // Assumes UTC if no timezone is provided + ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC"); + getWriter(MinorType.${name?upper_case}, arrowType).write${minor.class}(<#list fields as field>${field.name}<#if field_has_next>, ); + } + <#elseif minor.class == "Duration"> + @Override + public void write(${name}Holder holder) { + ArrowType.Duration arrowType = new ArrowType.Duration(holder.unit); + getWriter(MinorType.${name?upper_case}, arrowType).write(holder); + } + + /** + * @deprecated + * If you experience errors with using this version of the method, switch to the holder version. + * The errors occur when using an untyped or unioned PromotableWriter, because this version of the + * method does not have enough information to infer the ArrowType. 
+ * @see #write(${name}Holder) + */ + @Deprecated + @Override + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(<#list fields as field>${field.name}<#if field_has_next>, ); + } + <#elseif minor.class == "FixedSizeBinary"> + @Override + public void write(${name}Holder holder) { + ArrowType.FixedSizeBinary arrowType = new ArrowType.FixedSizeBinary(holder.byteWidth); + getWriter(MinorType.${name?upper_case}, arrowType).write(holder); + } + + /** + * @deprecated + * If you experience errors with using this version of the method, switch to the holder version. + * The errors occur when using an untyped or unioned PromotableWriter, because this version of the + * method does not have enough information to infer the ArrowType. + * @see #write(${name}Holder) + */ + @Deprecated + @Override + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(<#list fields as field>${field.name}<#if field_has_next>, ); + } + <#else> + @Override + public void write(${name}Holder holder) { + getWriter(MinorType.${name?upper_case}).write(holder); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(<#list fields as field>${field.name}<#if field_has_next>, ); + } + + + <#if minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value, offset, length); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + 
getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value, offset, length); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(Text value) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + @Override + public void write${minor.class}(String value) { + getWriter(MinorType.${name?upper_case}).write${minor.class}(value); + } + + + + public void writeNull() { + } + + @Override + public StructWriter struct() { + return getWriter(MinorType.LIST).struct(); + } + + @Override + public ListWriter list() { + return getWriter(MinorType.LIST).list(); + } + + @Override + public MapWriter map() { + return getWriter(MinorType.LIST).map(); + } + + @Override + public MapWriter map(boolean keysSorted) { + return getWriter(MinorType.MAP, new ArrowType.Map(keysSorted)); + } + + @Override + public StructWriter struct(String name) { + return getWriter(MinorType.STRUCT).struct(name); + } + + @Override + public ListWriter list(String name) { + return getWriter(MinorType.STRUCT).list(name); + } + + @Override + public MapWriter map(String name) { + return getWriter(MinorType.STRUCT).map(name); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + return getWriter(MinorType.STRUCT).map(name, keysSorted); + } + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + + <#if minor.typeParams?? 
> + @Override + public ${capName}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + return getWriter(MinorType.STRUCT).${lowerName}(name<#list minor.typeParams as typeParam>, ${typeParam.name}); + } + + + @Override + public ${capName}Writer ${lowerName}(String name) { + return getWriter(MinorType.STRUCT).${lowerName}(name); + } + + @Override + public ${capName}Writer ${lowerName}() { + return getWriter(MinorType.LIST).${lowerName}(); + } + + + + public void copyReader(FieldReader reader) { + getWriter().copyReader(reader); + } + + public void copyReaderToField(String name, FieldReader reader) { + getWriter().copyReaderToField(name, reader); + } +} diff --git a/java/vector/target/codegen/templates/ArrowType.java b/java/vector/target/codegen/templates/ArrowType.java new file mode 100644 index 000000000000..b08d4ad0afac --- /dev/null +++ b/java/vector/target/codegen/templates/ArrowType.java @@ -0,0 +1,375 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/types/pojo/ArrowType.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.types.pojo; + +import com.google.flatbuffers.FlatBufferBuilder; + +import java.util.Objects; + +import org.apache.arrow.flatbuf.Type; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.FieldVector; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +/** + * Arrow types + * Source code generated using FreeMarker template ${.template_name} + **/ +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + include = JsonTypeInfo.As.PROPERTY, + property = "name") +@JsonSubTypes({ +<#list arrowTypes.types as type> + @JsonSubTypes.Type(value = ArrowType.${type.name?remove_ending("_")}.class, name = "${type.name?remove_ending("_")?lower_case}"), + +}) +public abstract class ArrowType { + + public static abstract class PrimitiveType extends ArrowType { + + private PrimitiveType() { + } + + @Override + public boolean isComplex() { + return false; + } + } + + public static abstract class ComplexType extends ArrowType { + + private ComplexType() { + } + + @Override + public boolean isComplex() { + return true; + } + } + + public static enum ArrowTypeID { + <#list arrowTypes.types as type> + <#assign name = type.name> + ${name?remove_ending("_")}(Type.${name}), + + NONE(Type.NONE); + + private final byte flatbufType; + + public byte getFlatbufID() { + return this.flatbufType; + } + + private ArrowTypeID(byte flatbufType) { + this.flatbufType = flatbufType; + } + } + + @JsonIgnore + public abstract ArrowTypeID getTypeID(); + @JsonIgnore + public abstract boolean isComplex(); + public abstract int 
getType(FlatBufferBuilder builder); + public abstract T accept(ArrowTypeVisitor visitor); + + /** + * to visit the ArrowTypes + * + * type.accept(new ArrowTypeVisitor<Type>() { + * ... + * }); + * + */ + public static interface ArrowTypeVisitor { + <#list arrowTypes.types as type> + T visit(${type.name?remove_ending("_")} type); + + default T visit(ExtensionType type) { + return type.storageType().accept(this); + } + } + + /** + * to visit the Complex ArrowTypes and bundle Primitive ones in one case + */ + public static abstract class ComplexTypeVisitor implements ArrowTypeVisitor { + + public T visit(PrimitiveType type) { + throw new UnsupportedOperationException("Unexpected Primitive type: " + type); + } + + <#list arrowTypes.types as type> + <#if !type.complex> + public final T visit(${type.name?remove_ending("_")} type) { + return visit((PrimitiveType) type); + } + + + } + + /** + * to visit the Primitive ArrowTypes and bundle Complex ones under one case + */ + public static abstract class PrimitiveTypeVisitor implements ArrowTypeVisitor { + + public T visit(ComplexType type) { + throw new UnsupportedOperationException("Unexpected Complex type: " + type); + } + + <#list arrowTypes.types as type> + <#if type.complex> + public final T visit(${type.name?remove_ending("_")} type) { + return visit((ComplexType) type); + } + + + } + + <#list arrowTypes.types as type> + <#assign name = type.name?remove_ending("_")> + <#assign fields = type.fields> + public static class ${name} extends <#if type.complex>ComplexType<#else>PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.${name}; + <#if type.fields?size == 0> + public static final ${name} INSTANCE = new ${name}(); + <#else> + + <#list fields as field> + <#assign fieldType = field.valueType!field.type> + ${fieldType} ${field.name}; + + + + <#if type.name == "Decimal"> + // Needed to support golden file integration tests. 
+ @JsonCreator + public static Decimal createDecimal( + @JsonProperty("precision") int precision, + @JsonProperty("scale") int scale, + @JsonProperty("bitWidth") Integer bitWidth) { + + return new Decimal(precision, scale, bitWidth == null ? 128 : bitWidth); + } + + /** + * Construct Decimal with 128 bits. + * + * This is kept mainly for the sake of backward compatibility. + * Please use {@link org.apache.arrow.vector.types.pojo.ArrowType.Decimal#Decimal(int, int, int)} instead. + * + * @deprecated This API will be removed in a future release. + */ + @Deprecated + public Decimal(int precision, int scale) { + this(precision, scale, 128); + } + + <#else> + @JsonCreator + + public ${type.name}( + <#list type.fields as field> + <#assign fieldType = field.valueType!field.type> + @JsonProperty("${field.name}") ${fieldType} ${field.name}<#if field_has_next>, + + ) { + <#list type.fields as field> + this.${field.name} = ${field.name}; + + } + + <#list fields as field> + <#assign fieldType = field.valueType!field.type> + public ${fieldType} get${field.name?cap_first}() { + return ${field.name}; + } + + + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + <#list type.fields as field> + <#if field.type == "String"> + int ${field.name} = this.${field.name} == null ? -1 : builder.createString(this.${field.name}); + + <#if field.type == "int[]"> + int ${field.name} = this.${field.name} == null ? 
-1 : org.apache.arrow.flatbuf.${type.name}.create${field.name?cap_first}Vector(builder, this.${field.name}); + + + org.apache.arrow.flatbuf.${type.name}.start${type.name}(builder); + <#list type.fields as field> + <#if field.type == "String" || field.type == "int[]"> + if (this.${field.name} != null) { + org.apache.arrow.flatbuf.${type.name}.add${field.name?cap_first}(builder, ${field.name}); + } + <#else> + org.apache.arrow.flatbuf.${type.name}.add${field.name?cap_first}(builder, this.${field.name}<#if field.valueType??>.getFlatbufID()); + + + return org.apache.arrow.flatbuf.${type.name}.end${type.name}(builder); + } + + public String toString() { + return "${name}" + <#if fields?size != 0> + + "(" + <#list fields as field> + + <#if field.type == "int[]">java.util.Arrays.toString(${field.name})<#else>${field.name}<#if field_has_next> + ", " + + + ")" + + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {<#list type.fields as field>${field.name}<#if field_has_next>, }); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof ${name})) { + return false; + } + <#if type.fields?size == 0> + return true; + <#else> + ${type.name} that = (${type.name}) obj; + return <#list type.fields as field>Objects.deepEquals(this.${field.name}, that.${field.name}) <#if field_has_next>&&<#else>; + + + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + + + /** + * A user-defined data type that wraps an underlying storage type. + */ + public abstract static class ExtensionType extends ComplexType { + /** The on-wire type for this user-defined type. */ + public abstract ArrowType storageType(); + /** The name of this user-defined type. Used to identify the type during serialization. */ + public abstract String extensionName(); + /** Check equality of this type to another user-defined type. 
*/ + public abstract boolean extensionEquals(ExtensionType other); + /** Save any metadata for this type. */ + public abstract String serialize(); + /** Given saved metadata and the underlying storage type, construct a new instance of the user type. */ + public abstract ArrowType deserialize(ArrowType storageType, String serializedData); + /** Construct a vector for the user type. */ + public abstract FieldVector getNewVector(String name, FieldType fieldType, BufferAllocator allocator); + + /** The field metadata key storing the name of the extension type. */ + public static final String EXTENSION_METADATA_KEY_NAME = "ARROW:extension:name"; + /** The field metadata key storing metadata for the extension type. */ + public static final String EXTENSION_METADATA_KEY_METADATA = "ARROW:extension:metadata"; + + @Override + public ArrowTypeID getTypeID() { + return storageType().getTypeID(); + } + + @Override + public int getType(FlatBufferBuilder builder) { + return storageType().getType(builder); + } + + public String toString() { + return "ExtensionType(" + extensionName() + ", " + storageType().toString() + ")"; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {storageType(), extensionName()}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof ExtensionType)) { + return false; + } + return this.extensionEquals((ExtensionType) obj); + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + + private static final int defaultDecimalBitWidth = 128; + + public static org.apache.arrow.vector.types.pojo.ArrowType getTypeForField(org.apache.arrow.flatbuf.Field field) { + switch(field.typeType()) { + <#list arrowTypes.types as type> + <#assign name = type.name?remove_ending("_")> + <#assign nameLower = type.name?lower_case> + <#assign fields = type.fields> + case Type.${type.name}: { + org.apache.arrow.flatbuf.${type.name} ${nameLower}Type = 
(org.apache.arrow.flatbuf.${type.name}) field.type(new org.apache.arrow.flatbuf.${type.name}()); + <#list type.fields as field> + <#if field.type == "int[]"> + ${field.type} ${field.name} = new int[${nameLower}Type.${field.name}Length()]; + for (int i = 0; i< ${field.name}.length; ++i) { + ${field.name}[i] = ${nameLower}Type.${field.name}(i); + } + <#else> + ${field.type} ${field.name} = ${nameLower}Type.${field.name}(); + + + <#if type.name == "Decimal"> + if (bitWidth != defaultDecimalBitWidth && bitWidth != 256) { + throw new IllegalArgumentException("Library only supports 128-bit and 256-bit decimal values"); + } + + return new ${name}(<#list type.fields as field><#if field.valueType??>${field.valueType}.fromFlatbufID(${field.name})<#else>${field.name}<#if field_has_next>, ); + } + + default: + throw new UnsupportedOperationException("Unsupported type: " + field.typeType()); + } + } + + public static Int getInt(org.apache.arrow.flatbuf.Field field) { + org.apache.arrow.flatbuf.Int intType = (org.apache.arrow.flatbuf.Int) field.type(new org.apache.arrow.flatbuf.Int()); + return new Int(intType.bitWidth(), intType.isSigned()); + } +} + + diff --git a/java/vector/target/codegen/templates/BaseReader.java b/java/vector/target/codegen/templates/BaseReader.java new file mode 100644 index 000000000000..85d582a53bf5 --- /dev/null +++ b/java/vector/target/codegen/templates/BaseReader.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/reader/BaseReader.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.reader; + +<#include "/@includes/vv_imports.ftl" /> + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public interface BaseReader extends Positionable{ + Field getField(); + MinorType getMinorType(); + void reset(); + void read(UnionHolder holder); + void read(int index, UnionHolder holder); + void copyAsValue(UnionWriter writer); + void read(DenseUnionHolder holder); + void read(int index, DenseUnionHolder holder); + void copyAsValue(DenseUnionWriter writer); + boolean isSet(); + + public interface StructReader extends BaseReader, Iterable{ + FieldReader reader(String name); + } + + public interface RepeatedStructReader extends StructReader{ + boolean next(); + int size(); + void copyAsValue(StructWriter writer); + } + + public interface ListReader extends BaseReader{ + FieldReader reader(); + } + + public interface RepeatedListReader extends ListReader{ + boolean next(); + int size(); + void copyAsValue(ListWriter writer); + } + + public interface MapReader extends BaseReader{ + FieldReader reader(); + } + + public interface RepeatedMapReader extends MapReader{ + boolean next(); + int size(); + void copyAsValue(MapWriter writer); + } + + public interface ScalarReader extends + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> ${name}Reader, + 
BaseReader {} + + interface ComplexReader{ + StructReader rootAsStruct(); + ListReader rootAsList(); + boolean rootIsStruct(); + boolean ok(); + } +} + diff --git a/java/vector/target/codegen/templates/BaseWriter.java b/java/vector/target/codegen/templates/BaseWriter.java new file mode 100644 index 000000000000..35df256b324b --- /dev/null +++ b/java/vector/target/codegen/templates/BaseWriter.java @@ -0,0 +1,136 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/writer/BaseWriter.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.writer; + +<#include "/@includes/vv_imports.ftl" /> + +/* + * File generated from ${.template_name} using FreeMarker. + */ +@SuppressWarnings("unused") +public interface BaseWriter extends AutoCloseable, Positionable { + int getValueCapacity(); + void writeNull(); + + public interface StructWriter extends BaseWriter { + + Field getField(); + + /** + * Whether this writer is a struct writer and is empty (has no children). + * + *

+ * Intended only for use in determining whether to add dummy vector to + * avoid empty (zero-column) schema, as in JsonReader. + *

+ * @return whether the struct is empty + */ + boolean isEmptyStruct(); + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#if minor.typeParams?? > + ${capName}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}); + + ${capName}Writer ${lowerName}(String name); + + + void copyReaderToField(String name, FieldReader reader); + StructWriter struct(String name); + ListWriter list(String name); + MapWriter map(String name); + MapWriter map(String name, boolean keysSorted); + void start(); + void end(); + } + + public interface ListWriter extends BaseWriter { + void startList(); + void endList(); + StructWriter struct(); + ListWriter list(); + MapWriter map(); + MapWriter map(boolean keysSorted); + void copyReader(FieldReader reader); + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + ${capName}Writer ${lowerName}(); + + } + + public interface MapWriter extends ListWriter { + void startMap(); + void endMap(); + + void startEntry(); + void endEntry(); + + MapWriter key(); + MapWriter value(); + } + + public interface ScalarWriter extends + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> ${name}Writer, BaseWriter {} + + public interface ComplexWriter { + void allocate(); + void clear(); + void copyReader(FieldReader reader); + StructWriter rootAsStruct(); + ListWriter rootAsList(); + MapWriter rootAsMap(boolean keysSorted); + + void setPosition(int index); + void setValueCount(int count); + void reset(); + } + + public interface StructOrListWriter { + void 
start(); + void end(); + StructOrListWriter struct(String name); + /** + * @deprecated use {@link #listOfStruct()} instead. + */ + StructOrListWriter listoftstruct(String name); + StructOrListWriter listOfStruct(String name); + StructOrListWriter list(String name); + boolean isStructWriter(); + boolean isListWriter(); + VarCharWriter varChar(String name); + IntWriter integer(String name); + BigIntWriter bigInt(String name); + Float4Writer float4(String name); + Float8Writer float8(String name); + BitWriter bit(String name); + VarBinaryWriter binary(String name); + } +} diff --git a/java/vector/target/codegen/templates/CaseSensitiveStructWriters.java b/java/vector/target/codegen/templates/CaseSensitiveStructWriters.java new file mode 100644 index 000000000000..cc0dd7b335c5 --- /dev/null +++ b/java/vector/target/codegen/templates/CaseSensitiveStructWriters.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +<@pp.dropOutputFile /> +<#list ["Nullable", "Single"] as mode> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/${mode}CaseSensitiveStructWriter.java" /> +<#assign index = "idx()"> +<#if mode == "Single"> +<#assign containerClass = "NonNullableStructVector" /> +<#else> +<#assign containerClass = "StructVector" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> +/* + * This class is generated using FreeMarker and the ${.template_name} template. + */ +@SuppressWarnings("unused") +public class ${mode}CaseSensitiveStructWriter extends ${mode}StructWriter { + public ${mode}CaseSensitiveStructWriter(${containerClass} container) { + super(container); + } + + @Override + protected String handleCase(final String input){ + return input; + } + + @Override + protected NullableStructWriterFactory getNullableStructWriterFactory() { + return NullableStructWriterFactory.getNullableCaseSensitiveStructWriterFactoryInstance(); + } + +} + diff --git a/java/vector/target/codegen/templates/ComplexCopier.java b/java/vector/target/codegen/templates/ComplexCopier.java new file mode 100644 index 000000000000..1a3ba940e797 --- /dev/null +++ b/java/vector/target/codegen/templates/ComplexCopier.java @@ -0,0 +1,210 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.vector.complex.impl.UnionMapReader; +import org.apache.arrow.vector.complex.reader.FieldReader; +import org.apache.arrow.vector.complex.writer.FieldWriter; +import org.apache.arrow.vector.types.Types; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/ComplexCopier.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ +@SuppressWarnings("unused") +public class ComplexCopier { + + /** + * Do a deep copy of the value in input into output + * @param input field to read from + * @param output field to write to + */ + public static void copy(FieldReader input, FieldWriter output) { + writeValue(input, output); + } + + private static void writeValue(FieldReader reader, FieldWriter writer) { + final MinorType mt = reader.getMinorType(); + + switch (mt) { + + case LIST: + case LARGELIST: + case FIXED_SIZE_LIST: + if (reader.isSet()) { + writer.startList(); + while (reader.next()) { + FieldReader childReader = reader.reader(); + FieldWriter childWriter = getListWriterForReader(childReader, writer); + if (childReader.isSet()) { + writeValue(childReader, childWriter); + } else { + childWriter.writeNull(); + } + } + writer.endList(); + } else { + writer.writeNull(); + } + break; + case MAP: + if (reader.isSet()) { + UnionMapReader mapReader = (UnionMapReader) reader; + writer.startMap(); + while 
(mapReader.next()) { + FieldReader structReader = reader.reader(); + if (structReader.isSet()) { + writer.startEntry(); + writeValue(mapReader.key(), getMapWriterForReader(mapReader.key(), writer.key())); + writeValue(mapReader.value(), getMapWriterForReader(mapReader.value(), writer.value())); + writer.endEntry(); + } else { + writer.writeNull(); + } + } + writer.endMap(); + } else { + writer.writeNull(); + } + break; + case STRUCT: + if (reader.isSet()) { + writer.start(); + for(String name : reader){ + FieldReader childReader = reader.reader(name); + if (childReader.getMinorType() != Types.MinorType.NULL) { + FieldWriter childWriter = getStructWriterForReader(childReader, writer, name); + if (childReader.isSet()) { + writeValue(childReader, childWriter); + } else { + childWriter.writeNull(); + } + } + } + writer.end(); + } else { + writer.writeNull(); + } + break; + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") > + + case ${name?upper_case}: + if (reader.isSet()) { + Nullable${name}Holder ${uncappedName}Holder = new Nullable${name}Holder(); + reader.read(${uncappedName}Holder); + if (${uncappedName}Holder.isSet == 1) { + writer.write${name}(<#list fields as field>${uncappedName}Holder.${field.name}<#if field_has_next>, <#if minor.class?starts_with("Decimal")>, new ArrowType.Decimal(${uncappedName}Holder.precision, ${uncappedName}Holder.scale, ${name}Holder.WIDTH * 8)); + } + } else { + writer.writeNull(); + } + break; + + + + } + } + + private static FieldWriter getStructWriterForReader(FieldReader reader, StructWriter writer, String name) { + switch (reader.getMinorType()) { + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams??> + case ${name?upper_case}: + return (FieldWriter) writer.<#if name == "Int">integer<#else>${uncappedName}(name); + + <#if minor.class?starts_with("Decimal")> + case ${name?upper_case}: + if (reader.getField().getType() instanceof ArrowType.Decimal) { + ArrowType.Decimal type = (ArrowType.Decimal) reader.getField().getType(); + return (FieldWriter) writer.${uncappedName}(name, type.getScale(), type.getPrecision()); + } else { + return (FieldWriter) writer.${uncappedName}(name); + } + + + + case STRUCT: + return (FieldWriter) writer.struct(name); + case FIXED_SIZE_LIST: + case LIST: + return (FieldWriter) writer.list(name); + case MAP: + return (FieldWriter) writer.map(name); + default: + throw new UnsupportedOperationException(reader.getMinorType().toString()); + } + } + + private static FieldWriter getListWriterForReader(FieldReader reader, ListWriter writer) { + switch (reader.getMinorType()) { + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign 
uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") > + case ${name?upper_case}: + return (FieldWriter) writer.<#if name == "Int">integer<#else>${uncappedName}(); + + + case STRUCT: + return (FieldWriter) writer.struct(); + case FIXED_SIZE_LIST: + case LIST: + case MAP: + case NULL: + return (FieldWriter) writer.list(); + default: + throw new UnsupportedOperationException(reader.getMinorType().toString()); + } + } + + private static FieldWriter getMapWriterForReader(FieldReader reader, MapWriter writer) { + switch (reader.getMinorType()) { + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") > + case ${name?upper_case}: + return (FieldWriter) writer.<#if name == "Int">integer<#else>${uncappedName}(); + + + case STRUCT: + return (FieldWriter) writer.struct(); + case FIXED_SIZE_LIST: + case LIST: + case NULL: + return (FieldWriter) writer.list(); + case MAP: + return (FieldWriter) writer.map(false); + default: + throw new UnsupportedOperationException(reader.getMinorType().toString()); + } + } +} diff --git a/java/vector/target/codegen/templates/ComplexReaders.java b/java/vector/target/codegen/templates/ComplexReaders.java new file mode 100644 index 000000000000..48fb6603ad5e --- /dev/null +++ b/java/vector/target/codegen/templates/ComplexReaders.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.lang.Override; +import java.util.List; + +import org.apache.arrow.record.TransferPair; +import org.apache.arrow.vector.complex.IndexHolder; +import org.apache.arrow.vector.complex.writer.IntervalWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; + +<@pp.dropOutputFile /> +<#list vv.types as type> +<#list type.minor as minor> +<#list [""] as mode> +<#assign lowerName = minor.class?uncap_first /> +<#if lowerName == "int" ><#assign lowerName = "integer" /> +<#assign name = minor.class?cap_first /> +<#assign javaType = (minor.javaType!type.javaType) /> +<#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> +<#assign safeType=friendlyType /> +<#if safeType=="byte[]"><#assign safeType="ByteArray" /> + +<#assign hasFriendly = minor.friendlyType!"no" == "no" /> + +<#list ["Nullable"] as nullMode> +<#if mode == "" > +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/${name}ReaderImpl.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public class ${name}ReaderImpl extends AbstractFieldReader { + + private final ${name}Vector vector; + + public ${name}ReaderImpl(${name}Vector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean 
isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(${minor.class?cap_first}Writer writer){ + ${minor.class?cap_first}WriterImpl impl = (${minor.class?cap_first}WriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + ${minor.class?cap_first}WriterImpl impl = (${minor.class?cap_first}WriterImpl) writer.${lowerName}(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + <#if nullMode != "Nullable"> + public void read(${minor.class?cap_first}Holder h){ + vector.get(idx(), h); + } + + + public void read(Nullable${minor.class?cap_first}Holder h){ + vector.get(idx(), h); + } + + public ${friendlyType} read${safeType}(){ + return vector.getObject(idx()); + } + + <#if minor.class == "TimeStampSec" || + minor.class == "TimeStampMilli" || + minor.class == "TimeStampMicro" || + minor.class == "TimeStampNano"> + @Override + public ${minor.boxedType} read${minor.boxedType}(){ + return vector.get(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} + + +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/reader/${name}Reader.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.reader; + +<#include "/@includes/vv_imports.ftl" /> +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public interface ${name}Reader extends BaseReader{ + + public void read(${minor.class?cap_first}Holder h); + public void read(Nullable${minor.class?cap_first}Holder h); + public Object readObject(); + // read friendly type + public ${friendlyType} read${safeType}(); + public boolean isSet(); + public void copyAsValue(${minor.class}Writer writer); + public void copyAsField(String name, ${minor.class}Writer writer); + +} + + + + + + + + diff --git 
a/java/vector/target/codegen/templates/ComplexWriters.java b/java/vector/target/codegen/templates/ComplexWriters.java new file mode 100644 index 000000000000..2e3caae1f0f2 --- /dev/null +++ b/java/vector/target/codegen/templates/ComplexWriters.java @@ -0,0 +1,280 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +<@pp.dropOutputFile /> +<#list vv.types as type> +<#list type.minor as minor> +<#list ["Nullable"] as mode> +<#assign name = minor.class?cap_first /> +<#assign eName = name /> +<#assign javaType = (minor.javaType!type.javaType) /> +<#assign fields = minor.fields!type.fields /> +<#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> + +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/${eName}WriterImpl.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/* + * This class is generated using FreeMarker on the ${.template_name} template. 
+ */ +@SuppressWarnings("unused") +public class ${eName}WriterImpl extends AbstractFieldWriter { + + final ${name}Vector vector; + +<#if minor.class?ends_with("VarChar")> + private final Text textBuffer = new Text(); + + +public ${eName}WriterImpl(${name}Vector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + <#if mode == "Repeated"> + + public void write(${minor.class?cap_first}Holder h) { + mutator.addSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(${minor.class?cap_first}Holder h) { + mutator.addSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + mutator.addSafe(idx(), <#list fields as field>${field.name}<#if field_has_next>, ); + vector.setValueCount(idx()+1); + } + + public void setPosition(int idx) { + super.setPosition(idx); + mutator.startNewValue(idx); + } + + + <#else> + + <#if !minor.class?starts_with("Decimal")> + public void write(${minor.class}Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(Nullable${minor.class}Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + vector.setSafe(idx(), 1<#list fields as field><#if field.include!true >, ${field.name}); + vector.setValueCount(idx()+1); + } + + + <#if minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(${friendlyType} value) { + vector.setSafe(idx(), 
value); + vector.setValueCount(idx()+1); + } + + @Override + public void write${minor.class}(String value) { + textBuffer.set(value); + vector.setSafe(idx(), textBuffer); + vector.setValueCount(idx()+1); + } + + + <#if minor.class?starts_with("Decimal")> + + public void write(${minor.class}Holder h){ + DecimalUtility.checkPrecisionAndScale(h.precision, h.scale, vector.getPrecision(), vector.getScale()); + vector.setSafe(idx(), h); + vector.setValueCount(idx() + 1); + } + + public void write(Nullable${minor.class}Holder h){ + if (h.isSet == 1) { + DecimalUtility.checkPrecisionAndScale(h.precision, h.scale, vector.getPrecision(), vector.getScale()); + } + vector.setSafe(idx(), h); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(long start, ArrowBuf buffer){ + vector.setSafe(idx(), 1, start, buffer); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(long start, ArrowBuf buffer, ArrowType arrowType){ + DecimalUtility.checkPrecisionAndScale(((ArrowType.Decimal) arrowType).getPrecision(), + ((ArrowType.Decimal) arrowType).getScale(), vector.getPrecision(), vector.getScale()); + vector.setSafe(idx(), 1, start, buffer); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(BigDecimal value){ + // vector.setSafe already does precision and scale checking + vector.setSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void writeBigEndianBytesTo${minor.class}(byte[] value, ArrowType arrowType){ + DecimalUtility.checkPrecisionAndScale(((ArrowType.Decimal) arrowType).getPrecision(), + ((ArrowType.Decimal) arrowType).getScale(), vector.getPrecision(), vector.getScale()); + vector.setBigEndianSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void writeBigEndianBytesTo${minor.class}(byte[] value){ + vector.setBigEndianSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + 
+ + <#if minor.class?ends_with("VarBinary")> + public void write${minor.class}(byte[] value) { + vector.setSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(byte[] value, int offset, int length) { + vector.setSafe(idx(), value, offset, length); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(ByteBuffer value) { + vector.setSafe(idx(), value, 0, value.remaining()); + vector.setValueCount(idx() + 1); + } + + public void write${minor.class}(ByteBuffer value, int offset, int length) { + vector.setSafe(idx(), value, offset, length); + vector.setValueCount(idx() + 1); + } + +} + +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/writer/${eName}Writer.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.writer; + +<#include "/@includes/vv_imports.ftl" /> +/* + * This class is generated using FreeMarker on the ${.template_name} template. + */ +@SuppressWarnings("unused") +public interface ${eName}Writer extends BaseWriter { + public void write(${minor.class}Holder h); + +<#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + /** + * @deprecated + * The holder version should be used instead because the plain value version does not contain enough information + * to fully specify this field type. 
+ * @see #write(${minor.class}Holder) + */ + @Deprecated + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ); +<#if minor.class?starts_with("Decimal")> + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, , ArrowType arrowType); + + public void write${minor.class}(${friendlyType} value); + + public void writeBigEndianBytesTo${minor.class}(byte[] value, ArrowType arrowType); + + /** + * @deprecated + * Use either the version that additionally takes in an ArrowType or use the holder version. + * This version does not contain enough information to fully specify this field type. + * @see #writeBigEndianBytesTo${minor.class}(byte[], ArrowType) + * @see #write(${minor.class}Holder) + */ + @Deprecated + public void writeBigEndianBytesTo${minor.class}(byte[] value); + + +<#if minor.class?ends_with("VarBinary")> + public void write${minor.class}(byte[] value); + + public void write${minor.class}(byte[] value, int offset, int length); + + public void write${minor.class}(ByteBuffer value); + + public void write${minor.class}(ByteBuffer value, int offset, int length); + + +<#if minor.class?ends_with("VarChar")> + public void write${minor.class}(${friendlyType} value); + + public void write${minor.class}(String value); + +} + + + + diff --git a/java/vector/target/codegen/templates/DenseUnionReader.java b/java/vector/target/codegen/templates/DenseUnionReader.java new file mode 100644 index 000000000000..a085e03ea64e --- /dev/null +++ b/java/vector/target/codegen/templates/DenseUnionReader.java @@ -0,0 +1,229 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +import org.apache.arrow.vector.complex.impl.UnionListReader; +import org.apache.arrow.vector.types.Types.MinorType; +import org.apache.arrow.vector.types.pojo.Field; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/DenseUnionReader.java" /> + + +<#include "/@includes/license.ftl" /> + + package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public class DenseUnionReader extends AbstractFieldReader { + + private BaseReader[] readers = new BaseReader[Byte.MAX_VALUE + 1]; + public DenseUnionVector data; + + public DenseUnionReader(DenseUnionVector data) { + this.data = data; + } + + public MinorType getMinorType() { + byte typeId = data.getTypeId(idx()); + return data.getVectorByType(typeId).getMinorType(); + } + + public byte getTypeId() { + return data.getTypeId(idx()); + } + + @Override + public Field getField() { + return data.getField(); + } + + public boolean isSet(){ + return !data.isNull(idx()); + } + + public void read(DenseUnionHolder holder) { + holder.reader = this; + holder.isSet = this.isSet() ? 
1 : 0; + holder.typeId = getTypeId(); + } + + public void read(int index, UnionHolder holder) { + byte typeId = data.getTypeId(index); + getList(typeId).read(index, holder); + } + + private FieldReader getReaderForIndex(int index) { + byte typeId = data.getTypeId(index); + MinorType minorType = data.getVectorByType(typeId).getMinorType(); + FieldReader reader = (FieldReader) readers[typeId]; + if (reader != null) { + return reader; + } + switch (minorType) { + case NULL: + reader = NullReader.INSTANCE; + break; + case STRUCT: + reader = (FieldReader) getStruct(typeId); + break; + case LIST: + reader = (FieldReader) getList(typeId); + break; + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal")> + case ${name?upper_case}: + reader = (FieldReader) get${name}(typeId); + break; + + + + default: + throw new UnsupportedOperationException("Unsupported type: " + MinorType.values()[typeId]); + } + return reader; + } + + private SingleStructReaderImpl structReader; + + private StructReader getStruct(byte typeId) { + StructReader structReader = (StructReader) readers[typeId]; + if (structReader == null) { + structReader = (SingleStructReaderImpl) data.getVectorByType(typeId).getReader(); + structReader.setPosition(idx()); + readers[typeId] = structReader; + } + return structReader; + } + + private UnionListReader listReader; + + private FieldReader getList(byte typeId) { + UnionListReader listReader = (UnionListReader) readers[typeId]; + if (listReader == null) { + listReader = new UnionListReader((ListVector) data.getVectorByType(typeId)); + listReader.setPosition(idx()); + readers[typeId] = listReader; + } + return listReader; + } + + private UnionMapReader mapReader; + + private FieldReader getMap(byte typeId) { + UnionMapReader mapReader = (UnionMapReader) readers[typeId]; + if (mapReader == null) { + mapReader = new 
UnionMapReader((MapVector) data.getVectorByType(typeId)); + mapReader.setPosition(idx()); + readers[typeId] = mapReader; + } + return mapReader; + } + + @Override + public java.util.Iterator iterator() { + throw new UnsupportedOperationException(); + } + + @Override + public void copyAsValue(UnionWriter writer) { + writer.data.copyFrom(idx(), writer.idx(), data); + } + + <#list ["Object", "BigDecimal", "Short", "Integer", "Long", "Boolean", + "LocalDateTime", "Duration", "Period", "Double", "Float", + "Character", "Text", "Byte", "byte[]", "PeriodDuration"] as friendlyType> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + + @Override + public ${friendlyType} read${safeType}() { + return getReaderForIndex(idx()).read${safeType}(); + } + + + + public int size() { + return getReaderForIndex(idx()).size(); + } + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign uncappedName = name?uncap_first/> + <#assign boxedType = (minor.boxedType!type.boxedType) /> + <#assign javaType = (minor.javaType!type.javaType) /> + <#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal")> + + private ${name}ReaderImpl get${name}(byte typeId) { + ${name}ReaderImpl reader = (${name}ReaderImpl) readers[typeId]; + if (reader == null) { + reader = new ${name}ReaderImpl((${name}Vector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(Nullable${name}Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(${name}Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + + + + @Override + public void copyAsValue(ListWriter writer) { + ComplexCopier.copy(this, (FieldWriter) writer); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + byte typeId = data.getTypeId(index); + if (readers[typeId] != null) { + int offset = data.getOffset(index); + readers[typeId].setPosition(offset); + } + } + + public FieldReader reader(byte typeId, String name){ + return getStruct(typeId).reader(name); + } + + public FieldReader reader(byte typeId) { + return getList(typeId).reader(); + } + + public boolean next() { + return getReaderForIndex(idx()).next(); + } +} diff --git a/java/vector/target/codegen/templates/DenseUnionVector.java b/java/vector/target/codegen/templates/DenseUnionVector.java new file mode 100644 index 000000000000..42e96f7aca33 --- /dev/null +++ b/java/vector/target/codegen/templates/DenseUnionVector.java @@ -0,0 +1,997 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.ReferenceManager; +import org.apache.arrow.memory.util.CommonUtil; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.BaseValueVector; +import org.apache.arrow.vector.BitVectorHelper; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.ValueVector; +import org.apache.arrow.vector.complex.AbstractStructVector; +import org.apache.arrow.vector.complex.ListVector; +import org.apache.arrow.vector.complex.NonNullableStructVector; +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.compare.VectorVisitor; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.UnionMode; +import org.apache.arrow.vector.compare.RangeEqualsVisitor; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.util.CallBack; +import org.apache.arrow.vector.util.DataSizeRoundingUtil; +import org.apache.arrow.vector.util.TransferPair; + +import java.util.Arrays; +import java.util.stream.Collectors; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/DenseUnionVector.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex; + +<#include "/@includes/vv_imports.ftl" /> +import java.util.ArrayList; +import java.util.Collections; +import 
java.util.Iterator; +import org.apache.arrow.memory.util.CommonUtil; +import org.apache.arrow.memory.util.hash.ArrowBufHasher; +import org.apache.arrow.memory.util.hash.SimpleHasher; +import org.apache.arrow.vector.compare.VectorVisitor; +import org.apache.arrow.vector.complex.impl.ComplexCopier; +import org.apache.arrow.vector.util.CallBack; +import org.apache.arrow.vector.ipc.message.ArrowFieldNode; +import org.apache.arrow.vector.BaseValueVector; +import org.apache.arrow.vector.util.OversizedAllocationException; +import org.apache.arrow.util.Preconditions; + +import static org.apache.arrow.vector.types.UnionMode.Dense; + + + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ +@SuppressWarnings("unused") + + +/** + * A vector which can hold values of different types. It does so by using a StructVector which contains a vector for each + * primitive type that is stored. StructVector is used in order to take advantage of its serialization/deserialization methods, + * as well as the addOrGet method. + * + * For performance reasons, DenseUnionVector stores a cached reference to each subtype vector, to avoid having to do the struct lookup + * each time the vector is accessed. + * Source code generated using FreeMarker template ${.template_name} + */ +public class DenseUnionVector extends AbstractContainerVector implements FieldVector { + int valueCount; + + NonNullableStructVector internalStruct; + private ArrowBuf typeBuffer; + private ArrowBuf offsetBuffer; + + /** + * The key is type Id, and the value is vector. + */ + private ValueVector[] childVectors = new ValueVector[Byte.MAX_VALUE + 1]; + + /** + * The index is the type id, and the value is the type field. + */ + private Field[] typeFields = new Field[Byte.MAX_VALUE + 1]; + /** + * The index is the index into the typeFields array, and the value is the logical field id. 
+ */ + private byte[] typeMapFields = new byte[Byte.MAX_VALUE + 1]; + + /** + * The next type id to allocate. + */ + private byte nextTypeId = 0; + + private FieldReader reader; + + private long typeBufferAllocationSizeInBytes; + private long offsetBufferAllocationSizeInBytes; + + private final FieldType fieldType; + + public static final byte TYPE_WIDTH = 1; + public static final byte OFFSET_WIDTH = 4; + + private static final FieldType INTERNAL_STRUCT_TYPE = new FieldType(/*nullable*/ false, + ArrowType.Struct.INSTANCE, /*dictionary*/ null, /*metadata*/ null); + + public static DenseUnionVector empty(String name, BufferAllocator allocator) { + FieldType fieldType = FieldType.notNullable(new ArrowType.Union( + UnionMode.Dense, null)); + return new DenseUnionVector(name, allocator, fieldType, null); + } + + public DenseUnionVector(String name, BufferAllocator allocator, FieldType fieldType, CallBack callBack) { + super(name, allocator, callBack); + this.fieldType = fieldType; + this.internalStruct = new NonNullableStructVector( + "internal", + allocator, + INTERNAL_STRUCT_TYPE, + callBack, + AbstractStructVector.ConflictPolicy.CONFLICT_REPLACE, + false); + this.typeBuffer = allocator.getEmpty(); + this.typeBufferAllocationSizeInBytes = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH; + this.offsetBuffer = allocator.getEmpty(); + this.offsetBufferAllocationSizeInBytes = BaseValueVector.INITIAL_VALUE_ALLOCATION * OFFSET_WIDTH; + } + + public BufferAllocator getAllocator() { + return allocator; + } + + @Override + public MinorType getMinorType() { + return MinorType.DENSEUNION; + } + + @Override + public void initializeChildrenFromFields(List children) { + for (Field field : children) { + byte typeId = registerNewTypeId(field); + FieldVector vector = (FieldVector) internalStruct.add(field.getName(), field.getFieldType()); + vector.initializeChildrenFromFields(field.getChildren()); + childVectors[typeId] = vector; + } + } + + @Override + public List 
getChildrenFromFields() { + return internalStruct.getChildrenFromFields(); + } + + @Override + public void loadFieldBuffers(ArrowFieldNode fieldNode, List ownBuffers) { + if (ownBuffers.size() != 2) { + throw new IllegalArgumentException("Illegal buffer count for dense union with type " + getField().getFieldType() + + ", expected " + 2 + ", got: " + ownBuffers.size()); + } + + ArrowBuf buffer = ownBuffers.get(0); + typeBuffer.getReferenceManager().release(); + typeBuffer = buffer.getReferenceManager().retain(buffer, allocator); + typeBufferAllocationSizeInBytes = typeBuffer.capacity(); + + buffer = ownBuffers.get(1); + offsetBuffer.getReferenceManager().release(); + offsetBuffer = buffer.getReferenceManager().retain(buffer, allocator); + offsetBufferAllocationSizeInBytes = offsetBuffer.capacity(); + + this.valueCount = fieldNode.getLength(); + } + + @Override + public List getFieldBuffers() { + List result = new ArrayList<>(2); + setReaderAndWriterIndex(); + result.add(typeBuffer); + result.add(offsetBuffer); + + return result; + } + + private void setReaderAndWriterIndex() { + typeBuffer.readerIndex(0); + typeBuffer.writerIndex(valueCount * TYPE_WIDTH); + + offsetBuffer.readerIndex(0); + offsetBuffer.writerIndex((long) valueCount * OFFSET_WIDTH); + } + + /** + * Get the inner vectors. + * + * @deprecated This API will be removed as the current implementations no longer support inner vectors. + * + * @return the inner vectors for this field as defined by the TypeLayout + */ + @Override + @Deprecated + public List getFieldInnerVectors() { + throw new UnsupportedOperationException("There are no inner vectors. 
Use geFieldBuffers"); + } + + private String fieldName(byte typeId, MinorType type) { + return type.name().toLowerCase() + typeId; + } + + private FieldType fieldType(MinorType type) { + return FieldType.nullable(type.getType()); + } + + public synchronized byte registerNewTypeId(Field field) { + if (nextTypeId == typeFields.length) { + throw new IllegalStateException("Dense union vector support at most " + + typeFields.length + " relative types. Please use union of union instead"); + } + byte typeId = nextTypeId; + if (this.fieldType != null) { + int[] typeIds = ((ArrowType.Union) this.fieldType.getType()).getTypeIds(); + if (typeIds != null) { + int thisTypeId = typeIds[nextTypeId]; + if (thisTypeId > Byte.MAX_VALUE) { + throw new IllegalStateException("Dense union vector types must be bytes. " + thisTypeId + " is too large"); + } + typeId = (byte) thisTypeId; + } + } + typeFields[typeId] = field; + typeMapFields[nextTypeId] = typeId; + this.nextTypeId += 1; + return typeId; + } + + private T addOrGet(byte typeId, MinorType minorType, Class c) { + return internalStruct.addOrGet(fieldName(typeId, minorType), fieldType(minorType), c); + } + + private T addOrGet(byte typeId, MinorType minorType, ArrowType arrowType, Class c) { + return internalStruct.addOrGet(fieldName(typeId, minorType), FieldType.nullable(arrowType), c); + } + + @Override + public long getOffsetBufferAddress() { + return offsetBuffer.memoryAddress(); + } + + @Override + public long getDataBufferAddress() { + throw new UnsupportedOperationException(); + } + + @Override + public long getValidityBufferAddress() { + throw new UnsupportedOperationException(); + } + + @Override + public ArrowBuf getValidityBuffer() { throw new UnsupportedOperationException(); } + + @Override + public ArrowBuf getOffsetBuffer() { return offsetBuffer; } + + public ArrowBuf getTypeBuffer() { return typeBuffer; } + + @Override + public ArrowBuf getDataBuffer() { throw new UnsupportedOperationException(); } + + public 
StructVector getStruct(byte typeId) { + StructVector structVector = typeId < 0 ? null : (StructVector) childVectors[typeId]; + if (structVector == null) { + int vectorCount = internalStruct.size(); + structVector = addOrGet(typeId, MinorType.STRUCT, StructVector.class); + if (internalStruct.size() > vectorCount) { + structVector.allocateNew(); + childVectors[typeId] = structVector; + if (callBack != null) { + callBack.doWork(); + } + } + } + return structVector; + } + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#assign lowerCaseName = name?lower_case/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal")> + + public ${name}Vector get${name}Vector(byte typeId<#if minor.class?starts_with("Decimal")>, ArrowType arrowType) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.${name?upper_case}<#if minor.class?starts_with("Decimal")>, arrowType, ${name}Vector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (${name}Vector) vector; + } + + + + + public ListVector getList(byte typeId) { + ListVector listVector = typeId < 0 ? null : (ListVector) childVectors[typeId]; + if (listVector == null) { + int vectorCount = internalStruct.size(); + listVector = addOrGet(typeId, MinorType.LIST, ListVector.class); + if (internalStruct.size() > vectorCount) { + listVector.allocateNew(); + childVectors[typeId] = listVector; + if (callBack != null) { + callBack.doWork(); + } + } + } + return listVector; + } + + public MapVector getMap(byte typeId) { + MapVector mapVector = typeId < 0 ? 
null : (MapVector) childVectors[typeId]; + if (mapVector == null) { + int vectorCount = internalStruct.size(); + mapVector = addOrGet(typeId, MinorType.MAP, MapVector.class); + if (internalStruct.size() > vectorCount) { + mapVector.allocateNew(); + childVectors[typeId] = mapVector; + if (callBack != null) { + callBack.doWork(); + } + } + } + return mapVector; + } + + public byte getTypeId(int index) { + return typeBuffer.getByte(index * TYPE_WIDTH); + } + + public ValueVector getVectorByType(byte typeId) { + return typeId < 0 ? null : childVectors[typeId]; + } + + @Override + public void allocateNew() throws OutOfMemoryException { + /* new allocation -- clear the current buffers */ + clear(); + internalStruct.allocateNew(); + try { + allocateTypeBuffer(); + allocateOffsetBuffer(); + } catch (Exception e) { + clear(); + throw e; + } + } + + @Override + public boolean allocateNewSafe() { + /* new allocation -- clear the current buffers */ + clear(); + boolean safe = internalStruct.allocateNewSafe(); + if (!safe) { return false; } + try { + allocateTypeBuffer(); + allocateOffsetBuffer(); + } catch (Exception e) { + clear(); + return false; + } + + return true; + } + + private void allocateTypeBuffer() { + typeBuffer = allocator.buffer(typeBufferAllocationSizeInBytes); + typeBuffer.readerIndex(0); + setNegative(0, typeBuffer.capacity()); + } + + private void allocateOffsetBuffer() { + offsetBuffer = allocator.buffer(offsetBufferAllocationSizeInBytes); + offsetBuffer.readerIndex(0); + offsetBuffer.setZero(0, offsetBuffer.capacity()); + } + + + @Override + public void reAlloc() { + internalStruct.reAlloc(); + reallocTypeBuffer(); + reallocOffsetBuffer(); + } + + public int getOffset(int index) { + return offsetBuffer.getInt((long) index * OFFSET_WIDTH); + } + + private void reallocTypeBuffer() { + final long currentBufferCapacity = typeBuffer.capacity(); + long newAllocationSize = currentBufferCapacity * 2; + if (newAllocationSize == 0) { + if 
(typeBufferAllocationSizeInBytes > 0) { + newAllocationSize = typeBufferAllocationSizeInBytes; + } else { + newAllocationSize = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH * 2; + } + } + + newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize); + assert newAllocationSize >= 1; + + if (newAllocationSize > BaseValueVector.MAX_ALLOCATION_SIZE) { + throw new OversizedAllocationException("Unable to expand the buffer"); + } + + final ArrowBuf newBuf = allocator.buffer((int)newAllocationSize); + newBuf.setBytes(0, typeBuffer, 0, currentBufferCapacity); + typeBuffer.getReferenceManager().release(1); + typeBuffer = newBuf; + typeBufferAllocationSizeInBytes = (int)newAllocationSize; + setNegative(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity); + } + + private void reallocOffsetBuffer() { + final long currentBufferCapacity = offsetBuffer.capacity(); + long newAllocationSize = currentBufferCapacity * 2; + if (newAllocationSize == 0) { + if (offsetBufferAllocationSizeInBytes > 0) { + newAllocationSize = offsetBufferAllocationSizeInBytes; + } else { + newAllocationSize = BaseValueVector.INITIAL_VALUE_ALLOCATION * OFFSET_WIDTH * 2; + } + } + + newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize); + assert newAllocationSize >= 1; + + if (newAllocationSize > BaseValueVector.MAX_ALLOCATION_SIZE) { + throw new OversizedAllocationException("Unable to expand the buffer"); + } + + final ArrowBuf newBuf = allocator.buffer((int) newAllocationSize); + newBuf.setBytes(0, offsetBuffer, 0, currentBufferCapacity); + newBuf.setZero(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity); + offsetBuffer.getReferenceManager().release(1); + offsetBuffer = newBuf; + offsetBufferAllocationSizeInBytes = (int) newAllocationSize; + } + + @Override + public void setInitialCapacity(int numRecords) { } + + @Override + public int getValueCapacity() { + long capacity = getTypeBufferValueCapacity(); + long offsetCapacity = 
getOffsetBufferValueCapacity(); + if (offsetCapacity < capacity) { + capacity = offsetCapacity; + } + long structCapacity = internalStruct.getValueCapacity(); + if (structCapacity < capacity) { + structCapacity = capacity; + } + return (int) capacity; + } + + @Override + public void close() { + clear(); + } + + @Override + public void clear() { + valueCount = 0; + typeBuffer.getReferenceManager().release(); + typeBuffer = allocator.getEmpty(); + offsetBuffer.getReferenceManager().release(); + offsetBuffer = allocator.getEmpty(); + internalStruct.clear(); + } + + @Override + public void reset() { + valueCount = 0; + setNegative(0, typeBuffer.capacity()); + offsetBuffer.setZero(0, offsetBuffer.capacity()); + internalStruct.reset(); + } + + @Override + public Field getField() { + int childCount = (int) Arrays.stream(typeFields).filter(field -> field != null).count(); + List childFields = new ArrayList<>(childCount); + int[] typeIds = new int[childCount]; + for (int i = 0; i < typeFields.length; i++) { + if (typeFields[i] != null) { + int curIdx = childFields.size(); + typeIds[curIdx] = i; + childFields.add(typeFields[i]); + } + } + + FieldType fieldType; + if (this.fieldType == null) { + fieldType = FieldType.nullable(new ArrowType.Union(Dense, typeIds)); + } else { + final UnionMode mode = UnionMode.Dense; + fieldType = new FieldType(this.fieldType.isNullable(), new ArrowType.Union(mode, typeIds), + this.fieldType.getDictionary(), this.fieldType.getMetadata()); + } + + return new Field(name, fieldType, childFields); + } + + @Override + public TransferPair getTransferPair(BufferAllocator allocator) { + return getTransferPair(name, allocator); + } + + @Override + public TransferPair getTransferPair(String ref, BufferAllocator allocator) { + return getTransferPair(ref, allocator, null); + } + + @Override + public TransferPair getTransferPair(String ref, BufferAllocator allocator, CallBack callBack) { + return new 
org.apache.arrow.vector.complex.DenseUnionVector.TransferImpl(ref, allocator, callBack); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator) { + return getTransferPair(field, allocator, null); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator, CallBack callBack) { + return new org.apache.arrow.vector.complex.DenseUnionVector.TransferImpl(field, allocator, callBack); + } + + @Override + public TransferPair makeTransferPair(ValueVector target) { + return new TransferImpl((DenseUnionVector) target); + } + + @Override + public void copyFrom(int inIndex, int outIndex, ValueVector from) { + Preconditions.checkArgument(this.getMinorType() == from.getMinorType()); + DenseUnionVector fromCast = (DenseUnionVector) from; + int inOffset = fromCast.offsetBuffer.getInt((long) inIndex * OFFSET_WIDTH); + fromCast.getReader().setPosition(inOffset); + int outOffset = offsetBuffer.getInt((long) outIndex * OFFSET_WIDTH); + getWriter().setPosition(outOffset); + ComplexCopier.copy(fromCast.reader, writer); + } + + @Override + public void copyFromSafe(int inIndex, int outIndex, ValueVector from) { + copyFrom(inIndex, outIndex, from); + } + + public FieldVector addVector(byte typeId, FieldVector v) { + final String name = v.getName().isEmpty() ? 
fieldName(typeId, v.getMinorType()) : v.getName(); + Preconditions.checkState(internalStruct.getChild(name) == null, String.format("%s vector already exists", name)); + final FieldVector newVector = internalStruct.addOrGet(name, v.getField().getFieldType(), v.getClass()); + v.makeTransferPair(newVector).transfer(); + internalStruct.putChild(name, newVector); + childVectors[typeId] = newVector; + if (callBack != null) { + callBack.doWork(); + } + return newVector; + } + + private class TransferImpl implements TransferPair { + private final TransferPair[] internalTransferPairs = new TransferPair[nextTypeId]; + private final DenseUnionVector to; + + public TransferImpl(String name, BufferAllocator allocator, CallBack callBack) { + to = new DenseUnionVector(name, allocator, null, callBack); + internalStruct.makeTransferPair(to.internalStruct); + createTransferPairs(); + } + + public TransferImpl(Field field, BufferAllocator allocator, CallBack callBack) { + to = new DenseUnionVector(field.getName(), allocator, null, callBack); + internalStruct.makeTransferPair(to.internalStruct); + createTransferPairs(); + } + + public TransferImpl(DenseUnionVector to) { + this.to = to; + internalStruct.makeTransferPair(to.internalStruct); + createTransferPairs(); + } + + private void createTransferPairs() { + for (int i = 0; i < nextTypeId; i++) { + ValueVector srcVec = internalStruct.getVectorById(i); + ValueVector dstVec = to.internalStruct.getVectorById(i); + to.typeFields[i] = typeFields[i]; + to.typeMapFields[i] = typeMapFields[i]; + to.childVectors[i] = dstVec; + internalTransferPairs[i] = srcVec.makeTransferPair(dstVec); + } + } + + @Override + public void transfer() { + to.clear(); + + ReferenceManager refManager = typeBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(typeBuffer, to.allocator).getTransferredBuffer(); + + refManager = offsetBuffer.getReferenceManager(); + to.offsetBuffer = refManager.transferOwnership(offsetBuffer, 
to.allocator).getTransferredBuffer(); + + for (int i = 0; i < nextTypeId; i++) { + if (internalTransferPairs[i] != null) { + internalTransferPairs[i].transfer(); + to.childVectors[i] = internalTransferPairs[i].getTo(); + } + } + to.valueCount = valueCount; + clear(); + } + + @Override + public void splitAndTransfer(int startIndex, int length) { + to.clear(); + + // transfer type buffer + int startPoint = startIndex * TYPE_WIDTH; + int sliceLength = length * TYPE_WIDTH; + ArrowBuf slicedBuffer = typeBuffer.slice(startPoint, sliceLength); + ReferenceManager refManager = slicedBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(slicedBuffer, to.allocator).getTransferredBuffer(); + + // transfer offset buffer + while (to.offsetBuffer.capacity() < (long) length * OFFSET_WIDTH) { + to.reallocOffsetBuffer(); + } + + int [] typeCounts = new int[nextTypeId]; + int [] typeStarts = new int[nextTypeId]; + for (int i = 0; i < typeCounts.length; i++) { + typeCounts[i] = 0; + typeStarts[i] = -1; + } + + for (int i = startIndex; i < startIndex + length; i++) { + byte typeId = typeBuffer.getByte(i); + if (typeId >= 0) { + to.offsetBuffer.setInt((long) (i - startIndex) * OFFSET_WIDTH, typeCounts[typeId]); + typeCounts[typeId] += 1; + if (typeStarts[typeId] == -1) { + typeStarts[typeId] = offsetBuffer.getInt((long) i * OFFSET_WIDTH); + } + } + } + + // transfer vector values + for (int i = 0; i < nextTypeId; i++) { + if (typeCounts[i] > 0 && typeStarts[i] != -1) { + internalTransferPairs[i].splitAndTransfer(typeStarts[i], typeCounts[i]); + to.childVectors[i] = internalTransferPairs[i].getTo(); + } + } + + to.setValueCount(length); + } + + @Override + public ValueVector getTo() { + return to; + } + + @Override + public void copyValueSafe(int from, int to) { + this.to.copyFrom(from, to, DenseUnionVector.this); + } + } + + @Override + public FieldReader getReader() { + if (reader == null) { + reader = new DenseUnionReader(this); + } + return reader; + } + + 
public FieldWriter getWriter() { + if (writer == null) { + writer = new DenseUnionWriter(this); + } + return writer; + } + + @Override + public int getBufferSize() { + return this.getBufferSizeFor(this.valueCount); + } + + @Override + public int getBufferSizeFor(final int count) { + if (count == 0) { + return 0; + } + + int[] counts = new int[Byte.MAX_VALUE + 1]; + for (int i = 0; i < count; i++) { + byte typeId = getTypeId(i); + if (typeId != -1) { + counts[typeId] += 1; + } + } + + long childBytes = 0; + for (int typeId = 0; typeId < childVectors.length; typeId++) { + ValueVector childVector = childVectors[typeId]; + if (childVector != null) { + childBytes += childVector.getBufferSizeFor(counts[typeId]); + } + } + + return (int) (count * TYPE_WIDTH + (long) count * OFFSET_WIDTH + childBytes); + } + + @Override + public ArrowBuf[] getBuffers(boolean clear) { + List list = new java.util.ArrayList<>(); + setReaderAndWriterIndex(); + if (getBufferSize() != 0) { + list.add(typeBuffer); + list.add(offsetBuffer); + list.addAll(java.util.Arrays.asList(internalStruct.getBuffers(clear))); + } + if (clear) { + valueCount = 0; + typeBuffer.getReferenceManager().retain(); + typeBuffer.close(); + typeBuffer = allocator.getEmpty(); + offsetBuffer.getReferenceManager().retain(); + offsetBuffer.close(); + offsetBuffer = allocator.getEmpty(); + } + return list.toArray(new ArrowBuf[list.size()]); + } + + @Override + public Iterator iterator() { + return internalStruct.iterator(); + } + + private ValueVector getVector(int index) { + byte typeId = typeBuffer.getByte(index * TYPE_WIDTH); + return getVectorByType(typeId); + } + + public Object getObject(int index) { + ValueVector vector = getVector(index); + if (vector != null) { + int offset = offsetBuffer.getInt((long) index * OFFSET_WIDTH); + return vector.isNull(offset) ? 
null : vector.getObject(offset); + } + return null; + } + + public void get(int index, DenseUnionHolder holder) { + FieldReader reader = new DenseUnionReader(DenseUnionVector.this); + reader.setPosition(index); + holder.reader = reader; + } + + public int getValueCount() { + return valueCount; + } + + /** + * IMPORTANT: Union types always return non null as there is no validity buffer. + * + * To check validity correctly you must check the underlying vector. + */ + public boolean isNull(int index) { + return false; + } + + @Override + public int getNullCount() { + return 0; + } + + public int isSet(int index) { + return isNull(index) ? 0 : 1; + } + + DenseUnionWriter writer; + + public void setValueCount(int valueCount) { + this.valueCount = valueCount; + while (valueCount > getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + reallocOffsetBuffer(); + } + setChildVectorValueCounts(); + } + + private void setChildVectorValueCounts() { + int [] counts = new int[Byte.MAX_VALUE + 1]; + for (int i = 0; i < this.valueCount; i++) { + byte typeId = getTypeId(i); + if (typeId != -1) { + counts[typeId] += 1; + } + } + for (int i = 0; i < nextTypeId; i++) { + childVectors[typeMapFields[i]].setValueCount(counts[typeMapFields[i]]); + } + } + + public void setSafe(int index, DenseUnionHolder holder) { + FieldReader reader = holder.reader; + if (writer == null) { + writer = new DenseUnionWriter(DenseUnionVector.this); + } + int offset = offsetBuffer.getInt((long) index * OFFSET_WIDTH); + MinorType type = reader.getMinorType(); + writer.setPosition(offset); + byte typeId = holder.typeId; + switch (type) { + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal")> + case ${name?upper_case}: + Nullable${name}Holder ${uncappedName}Holder = new Nullable${name}Holder(); + reader.read(${uncappedName}Holder); + setSafe(index, ${uncappedName}Holder); + break; + + + + case STRUCT: + case LIST: { + setTypeId(index, typeId); + ComplexCopier.copy(reader, writer); + break; + } + default: + throw new UnsupportedOperationException(); + } + } + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal")> + public void setSafe(int index, Nullable${name}Holder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + ${name}Vector vector = get${name}Vector(typeId<#if minor.class?starts_with("Decimal")>, new ArrowType.Decimal(holder.precision, holder.scale, holder.WIDTH * 8)); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + + + + + public void setTypeId(int index, byte typeId) { + while (index >= getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + } + typeBuffer.setByte(index * TYPE_WIDTH , typeId); + } + + private int getTypeBufferValueCapacity() { + return (int) typeBuffer.capacity() / TYPE_WIDTH; + } + + public void setOffset(int index, int offset) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + + private long getOffsetBufferValueCapacity() { + return offsetBuffer.capacity() / OFFSET_WIDTH; + } + + @Override + public int hashCode(int index, ArrowBufHasher hasher) { + if (isNull(index)) { + return 0; + } + int offset = offsetBuffer.getInt((long) index * OFFSET_WIDTH); + return getVector(index).hashCode(offset, 
hasher); + } + + @Override + public int hashCode(int index) { + return hashCode(index, SimpleHasher.INSTANCE); + } + + @Override + public OUT accept(VectorVisitor visitor, IN value) { + return visitor.visit(this, value); + } + + @Override + public String getName() { + return name; + } + + private void setNegative(long start, long end) { + for (long i = start;i < end; i++) { + typeBuffer.setByte(i, -1); + } + } + + @Override + public T addOrGet(String name, FieldType fieldType, Class clazz) { + return internalStruct.addOrGet(name, fieldType, clazz); + } + + @Override + public T getChild(String name, Class clazz) { + return internalStruct.getChild(name, clazz); + } + + @Override + public VectorWithOrdinal getChildVectorWithOrdinal(String name) { + return internalStruct.getChildVectorWithOrdinal(name); + } + + @Override + public int size() { + return internalStruct.size(); + } + + @Override + public void setInitialCapacity(int valueCount, double density) { + for (final ValueVector vector : internalStruct) { + if (vector instanceof DensityAwareVector) { + ((DensityAwareVector) vector).setInitialCapacity(valueCount, density); + } else { + vector.setInitialCapacity(valueCount); + } + } + } + + /** + * Set the element at the given index to null. For DenseUnionVector, it throws an UnsupportedOperationException + * as nulls are not supported at the top level and isNull() always returns false. 
+ * + * @param index position of element + * @throws UnsupportedOperationException whenever invoked + */ + @Override + public void setNull(int index) { + throw new UnsupportedOperationException("The method setNull() is not supported on DenseUnionVector."); + } +} diff --git a/java/vector/target/codegen/templates/DenseUnionWriter.java b/java/vector/target/codegen/templates/DenseUnionWriter.java new file mode 100644 index 000000000000..e69a62a9e0f6 --- /dev/null +++ b/java/vector/target/codegen/templates/DenseUnionWriter.java @@ -0,0 +1,302 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.complex.impl.NullableStructWriterFactory; +import org.apache.arrow.vector.types.Types; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/DenseUnionWriter.java" /> + + +<#include "/@includes/license.ftl" /> + + package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + import org.apache.arrow.vector.complex.writer.BaseWriter; + import org.apache.arrow.vector.types.Types.MinorType; + +/* + * This class is generated using freemarker and the ${.template_name} template. 
+ */ +@SuppressWarnings("unused") +public class DenseUnionWriter extends AbstractFieldWriter implements FieldWriter { + + DenseUnionVector data; + + private BaseWriter[] writers = new BaseWriter[Byte.MAX_VALUE + 1]; + private final NullableStructWriterFactory nullableStructWriterFactory; + + public DenseUnionWriter(DenseUnionVector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public DenseUnionWriter(DenseUnionVector vector, NullableStructWriterFactory nullableStructWriterFactory) { + data = vector; + this.nullableStructWriterFactory = nullableStructWriterFactory; + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + for (BaseWriter writer : writers) { + writer.setPosition(index); + } + } + + @Override + public void start() { + byte typeId = data.getTypeId(idx()); + getStructWriter((byte) idx()).start(); + } + + @Override + public void end() { + byte typeId = data.getTypeId(idx()); + getStructWriter(typeId).end(); + } + + @Override + public void startList() { + byte typeId = data.getTypeId(idx()); + getListWriter(typeId).startList(); + } + + @Override + public void endList() { + byte typeId = data.getTypeId(idx()); + getListWriter(typeId).endList(); + } + + private StructWriter getStructWriter(byte typeId) { + StructWriter structWriter = (StructWriter) writers[typeId]; + if (structWriter == null) { + structWriter = nullableStructWriterFactory.build((StructVector) data.getVectorByType(typeId)); + writers[typeId] = structWriter; + } + return structWriter; + } + + public StructWriter asStruct(byte typeId) { + data.setTypeId(idx(), typeId); + return getStructWriter(typeId); + } + + private ListWriter getListWriter(byte typeId) { + ListWriter listWriter = (ListWriter) writers[typeId]; + if (listWriter == null) { + listWriter = new UnionListWriter((ListVector) data.getVectorByType(typeId), nullableStructWriterFactory); + writers[typeId] = listWriter; + } + return listWriter; + } + 
+ public ListWriter asList(byte typeId) { + data.setTypeId(idx(), typeId); + return getListWriter(typeId); + } + + private MapWriter getMapWriter(byte typeId) { + MapWriter mapWriter = (MapWriter) writers[typeId]; + if (mapWriter == null) { + mapWriter = new UnionMapWriter((MapVector) data.getVectorByType(typeId)); + writers[typeId] = mapWriter; + } + return mapWriter; + } + + public MapWriter asMap(byte typeId) { + data.setTypeId(idx(), typeId); + return getMapWriter(typeId); + } + + BaseWriter getWriter(byte typeId) { + MinorType minorType = data.getVectorByType(typeId).getMinorType(); + switch (minorType) { + case STRUCT: + return getStructWriter(typeId); + case LIST: + return getListWriter(typeId); + case MAP: + return getMapWriter(typeId); + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal")> + case ${name?upper_case}: + return get${name}Writer(typeId); + + + + default: + throw new UnsupportedOperationException("Unknown type: " + minorType); + } + } + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal")> + + private ${name}Writer get${name}Writer(byte typeId) { + ${name}Writer writer = (${name}Writer) writers[typeId]; + if (writer == null) { + writer = new ${name}WriterImpl((${name}Vector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public ${name}Writer as${name}(byte typeId) { + data.setTypeId(idx(), typeId); + return get${name}Writer(typeId); + } + + @Override + public void write(${name}Holder holder) { + throw new UnsupportedOperationException(); + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, , byte typeId<#if minor.class?starts_with("Decimal")>, ArrowType arrowType) { + data.setTypeId(idx(), typeId); + get${name}Writer(typeId).setPosition(data.getOffset(idx())); + get${name}Writer(typeId).write${name}(<#list fields as field>${field.name}<#if field_has_next>, <#if minor.class?starts_with("Decimal")>, arrowType); + } + + + + + public void writeNull() { + } + + @Override + public StructWriter struct() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).struct(); + } + + @Override + public ListWriter list() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).list(); + } + + @Override + public ListWriter list(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).list(name); + } + + @Override + public MapWriter map() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getMapWriter(typeId).map(); + } + + @Override + public MapWriter map(String name) { + byte typeId = 
data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).map(name); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).map(name, keysSorted); + } + + @Override + public StructWriter struct(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).struct(name); + } + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") > + @Override + public ${capName}Writer ${lowerName}(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).${lowerName}(name); + } + + @Override + public ${capName}Writer ${lowerName}() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).${lowerName}(); + } + + <#if minor.class?starts_with("Decimal")> + public ${capName}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).${lowerName}(name<#list minor.typeParams as typeParam>, ${typeParam.name}); + } + + + + @Override + public void allocate() { + data.allocateNew(); + } + + 
@Override + public void clear() { + data.clear(); + } + + @Override + public void close() throws Exception { + data.close(); + } + + @Override + public Field getField() { + return data.getField(); + } + + @Override + public int getValueCapacity() { + return data.getValueCapacity(); + } +} diff --git a/java/vector/target/codegen/templates/HolderReaderImpl.java b/java/vector/target/codegen/templates/HolderReaderImpl.java new file mode 100644 index 000000000000..8394aaad4175 --- /dev/null +++ b/java/vector/target/codegen/templates/HolderReaderImpl.java @@ -0,0 +1,173 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +<@pp.dropOutputFile /> +<#list vv.types as type> +<#list type.minor as minor> +<#list ["", "Nullable"] as holderMode> +<#assign nullMode = holderMode /> + +<#assign lowerName = minor.class?uncap_first /> +<#if lowerName == "int" ><#assign lowerName = "integer" /> +<#assign name = minor.class?cap_first /> +<#assign javaType = (minor.javaType!type.javaType) /> +<#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> +<#assign safeType=friendlyType /> +<#if safeType=="byte[]"><#assign safeType="ByteArray" /> +<#assign fields = (minor.fields!type.fields) + minor.typeParams![]/> + +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/${holderMode}${name}HolderReaderImpl.java" /> +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +// Source code generated using FreeMarker template ${.template_name} + +@SuppressWarnings("unused") +public class ${holderMode}${name}HolderReaderImpl extends AbstractFieldReader { + + private ${nullMode}${name}Holder holder; + public ${holderMode}${name}HolderReaderImpl(${holderMode}${name}Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.${name?upper_case}; + } + + @Override + public boolean isSet() { + <#if holderMode == "Nullable"> + return this.holder.isSet == 1; + <#else> + return true; + + } + + @Override + public void read(${name}Holder h) { + <#list fields as field> + h.${field.name} = holder.${field.name}; + + } + + @Override + public 
void read(Nullable${name}Holder h) { + <#list fields as field> + h.${field.name} = holder.${field.name}; + + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public ${friendlyType} read${safeType}() { + <#if nullMode == "Nullable"> + if (!isSet()) { + return null; + } + + + <#if type.major == "VarLen"> + <#if type.width == 4> + int length = holder.end - holder.start; + <#elseif type.width == 8> + int length = (int) (holder.end - holder.start); + + byte[] value = new byte [length]; + holder.buffer.getBytes(holder.start, value, 0, length); + <#if minor.class == "VarBinary" || minor.class == "LargeVarBinary"> + return value; + <#elseif minor.class == "VarChar" || minor.class == "LargeVarChar"> + Text text = new Text(); + text.set(value); + return text; + + <#elseif minor.class == "IntervalDay"> + return Duration.ofDays(holder.days).plusMillis(holder.milliseconds); + <#elseif minor.class == "IntervalYear"> + return Period.ofMonths(holder.value); + <#elseif minor.class == "IntervalMonthDayNano"> + return new PeriodDuration(Period.ofMonths(holder.months).plusDays(holder.days), + Duration.ofNanos(holder.nanoseconds)); + <#elseif minor.class == "Duration"> + return DurationVector.toDuration(holder.value, holder.unit); + <#elseif minor.class == "Bit" > + return new Boolean(holder.value != 0); + <#elseif minor.class == "Decimal"> + byte[] bytes = new byte[${type.width}]; + holder.buffer.getBytes(holder.start, bytes, 0, ${type.width}); + ${friendlyType} value = new BigDecimal(new BigInteger(bytes), holder.scale); + return value; + <#elseif minor.class == "Decimal256"> + byte[] bytes = new byte[${type.width}]; + holder.buffer.getBytes(holder.start, bytes, 0, ${type.width}); + ${friendlyType} value = new BigDecimal(new BigInteger(bytes), holder.scale); + return value; + <#elseif minor.class == "FixedSizeBinary"> + byte[] value = new byte [holder.byteWidth]; + holder.buffer.getBytes(0, value, 0, holder.byteWidth); + return value; + <#elseif minor.class == 
"TimeStampSec"> + final long millis = java.util.concurrent.TimeUnit.SECONDS.toMillis(holder.value); + return DateUtility.getLocalDateTimeFromEpochMilli(millis); + <#elseif minor.class == "TimeStampMilli" || minor.class == "DateMilli" || minor.class == "TimeMilli"> + return DateUtility.getLocalDateTimeFromEpochMilli(holder.value); + <#elseif minor.class == "TimeStampMicro"> + return DateUtility.getLocalDateTimeFromEpochMicro(holder.value); + <#elseif minor.class == "TimeStampNano"> + return DateUtility.getLocalDateTimeFromEpochNano(holder.value); + <#else> + ${friendlyType} value = new ${friendlyType}(this.holder.value); + return value; + + } + + @Override + public Object readObject() { + return read${safeType}(); + } + + <#if nullMode != "Nullable"> + public void copyAsValue(${minor.class?cap_first}Writer writer){ + writer.write(holder); + } + +} + + + + diff --git a/java/vector/target/codegen/templates/NullReader.java b/java/vector/target/codegen/templates/NullReader.java new file mode 100644 index 000000000000..0c65f9a56bfa --- /dev/null +++ b/java/vector/target/codegen/templates/NullReader.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.arrow.vector.types.pojo.ArrowType.Null; +import org.apache.arrow.vector.types.pojo.Field; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/NullReader.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public class NullReader extends AbstractBaseReader implements FieldReader{ + + public static final NullReader INSTANCE = new NullReader(); + public static final NullReader EMPTY_LIST_INSTANCE = new NullReader(MinorType.NULL); + public static final NullReader EMPTY_STRUCT_INSTANCE = new NullReader(MinorType.STRUCT); + private MinorType type; + + private NullReader(){ + super(); + type = MinorType.NULL; + } + + private NullReader(MinorType type){ + super(); + this.type = type; + } + + @Override + public MinorType getMinorType() { + return type; + } + + @Override + public Field getField() { + return new Field("", FieldType.nullable(new Null()), null); + } + + public void copyAsValue(StructWriter writer) {} + + public void copyAsValue(ListWriter writer) {} + + public void copyAsValue(UnionWriter writer) {} + + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + public void read(${name}Holder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(Nullable${name}Holder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, ${name}Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(${minor.class}Writer writer){} + public void copyAsField(String name, ${minor.class}Writer writer){} + + public void read(int arrayIndex, Nullable${name}Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + + public int size(){ + return 0; + 
} + + public boolean isSet(){ + return false; + } + + public boolean next(){ + return false; + } + + public RepeatedStructReader struct(){ + return this; + } + + public RepeatedListReader list(){ + return this; + } + + public StructReader struct(String name){ + return this; + } + + public ListReader list(String name){ + return this; + } + + public FieldReader reader(String name){ + return this; + } + + public FieldReader reader(){ + return this; + } + + private void fail(String name){ + throw new IllegalArgumentException(String.format("You tried to read a %s type when you are using a ValueReader of type %s.", name, this.getClass().getSimpleName())); + } + + <#list ["Object", "BigDecimal", "Short", "Integer", "Long", "Boolean", + "LocalDateTime", "Duration", "Period", "Double", "Float", + "Character", "Text", "String", "Byte", "byte[]", "PeriodDuration"] as friendlyType> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + + public ${friendlyType} read${safeType}(int arrayIndex){ + return null; + } + + public ${friendlyType} read${safeType}(){ + return null; + } + + +} + + + diff --git a/java/vector/target/codegen/templates/StructWriters.java b/java/vector/target/codegen/templates/StructWriters.java new file mode 100644 index 000000000000..b6dd2b75c526 --- /dev/null +++ b/java/vector/target/codegen/templates/StructWriters.java @@ -0,0 +1,351 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +<@pp.dropOutputFile /> +<#list ["Nullable", "Single"] as mode> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/${mode}StructWriter.java" /> +<#assign index = "idx()"> +<#if mode == "Single"> +<#assign containerClass = "NonNullableStructVector" /> +<#else> +<#assign containerClass = "StructVector" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> +import java.util.Map; +import java.util.HashMap; + +import org.apache.arrow.vector.holders.RepeatedStructHolder; +import org.apache.arrow.vector.AllocationHelper; +import org.apache.arrow.vector.complex.reader.FieldReader; +import org.apache.arrow.vector.complex.writer.FieldWriter; + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/* + * This class is generated using FreeMarker and the ${.template_name} template. 
+ */ +@SuppressWarnings("unused") +public class ${mode}StructWriter extends AbstractFieldWriter { + + protected final ${containerClass} container; + private int initialCapacity; + private final Map fields = new HashMap<>(); + public ${mode}StructWriter(${containerClass} container) { + <#if mode == "Single"> + if (container instanceof StructVector) { + throw new IllegalArgumentException("Invalid container: " + container); + } + + this.container = container; + this.initialCapacity = 0; + for (Field child : container.getField().getChildren()) { + MinorType minorType = Types.getMinorTypeForArrowType(child.getType()); + switch (minorType) { + case STRUCT: + struct(child.getName()); + break; + case LIST: + list(child.getName()); + break; + case MAP: { + ArrowType.Map arrowType = (ArrowType.Map) child.getType(); + map(child.getName(), arrowType.getKeysSorted()); + break; + } + case DENSEUNION: { + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.DENSEUNION.getType(), null, null); + DenseUnionWriter writer = new DenseUnionWriter(container.addOrGet(child.getName(), fieldType, DenseUnionVector.class), getNullableStructWriterFactory()); + fields.put(handleCase(child.getName()), writer); + break; + } + case UNION: + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.UNION.getType(), null, null); + UnionWriter writer = new UnionWriter(container.addOrGet(child.getName(), fieldType, UnionVector.class), getNullableStructWriterFactory()); + fields.put(handleCase(child.getName()), writer); + break; +<#list vv.types as type><#list type.minor as minor> +<#assign lowerName = minor.class?uncap_first /> +<#if lowerName == "int" ><#assign lowerName = "integer" /> +<#assign upperName = minor.class?upper_case /> + case ${upperName}: { + <#if minor.typeParams?? 
> + ${minor.arrowType} arrowType = (${minor.arrowType})child.getType(); + ${lowerName}(child.getName()<#list minor.typeParams as typeParam>, arrowType.get${typeParam.name?cap_first}()); + <#else> + ${lowerName}(child.getName()); + + break; + } + + default: + throw new UnsupportedOperationException("Unknown type: " + minorType); + } + } + } + + protected String handleCase(final String input) { + return input.toLowerCase(); + } + + protected NullableStructWriterFactory getNullableStructWriterFactory() { + return NullableStructWriterFactory.getNullableStructWriterFactoryInstance(); + } + + @Override + public int getValueCapacity() { + return container.getValueCapacity(); + } + + public void setInitialCapacity(int initialCapacity) { + this.initialCapacity = initialCapacity; + container.setInitialCapacity(initialCapacity); + } + + @Override + public boolean isEmptyStruct() { + return 0 == container.size(); + } + + @Override + public Field getField() { + return container.getField(); + } + + @Override + public StructWriter struct(String name) { + String finalName = handleCase(name); + FieldWriter writer = fields.get(finalName); + if(writer == null){ + int vectorCount=container.size(); + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.STRUCT.getType(), null, null); + StructVector vector = container.addOrGet(name, fieldType, StructVector.class); + writer = new PromotableWriter(vector, container, getNullableStructWriterFactory()); + if(vectorCount != container.size()) { + writer.allocate(); + } + writer.setPosition(idx()); + fields.put(finalName, writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.STRUCT); + } + } + return writer; + } + + @Override + public void close() throws Exception { + clear(); + container.close(); + } + + @Override + public void allocate() { + container.allocateNew(); + for(final FieldWriter w : fields.values()) { + w.allocate(); + } + } 
+ + @Override + public void clear() { + container.clear(); + for(final FieldWriter w : fields.values()) { + w.clear(); + } + } + + @Override + public ListWriter list(String name) { + String finalName = handleCase(name); + FieldWriter writer = fields.get(finalName); + int vectorCount = container.size(); + if(writer == null) { + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.LIST.getType(), null, null); + writer = new PromotableWriter(container.addOrGet(name, fieldType, ListVector.class), container, getNullableStructWriterFactory()); + if (container.size() > vectorCount) { + writer.allocate(); + } + writer.setPosition(idx()); + fields.put(finalName, writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.LIST); + } + } + return writer; + } + + @Override + public MapWriter map(String name) { + return map(name, false); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + MapVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new ArrowType.Map(keysSorted) + ,null, null), + MapVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.MAP, new ArrowType.Map(keysSorted)); + } + } + return writer; + } + + public void setValueCount(int count) { + container.setValueCount(count); + } + + @Override + public void setPosition(int index) { + 
super.setPosition(index); + for(final FieldWriter w: fields.values()) { + w.setPosition(index); + } + } + + <#if mode="Nullable"> + @Override + public void writeNull() { + container.setNull(idx()); + setValueCount(idx()+1); + super.setPosition(idx()+1); + } + + + @Override + public void start() { + <#if mode == "Single"> + <#else> + container.setIndexDefined(idx()); + + } + + @Override + public void end() { + setPosition(idx()+1); + } + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#assign vectName = capName /> + + <#if minor.typeParams?? > + @Override + public ${minor.class}Writer ${lowerName}(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public ${minor.class}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + <#else> + @Override + public ${minor.class}Writer ${lowerName}(String name) { + + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + ${vectName}Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + <#if minor.typeParams??> + <#if minor.arrowTypeConstructorParams??> + <#assign constructorParams = minor.arrowTypeConstructorParams /> + <#else> + <#assign constructorParams = [] /> + <#list minor.typeParams?reverse as typeParam> + <#assign constructorParams = constructorParams + [ typeParam.name ] /> + + + new ${minor.arrowType}(${constructorParams?join(", ")}<#if minor.class?starts_with("Decimal")>, ${vectName}Vector.TYPE_WIDTH * 8) + <#else> + MinorType.${upperName}.getType() + + ,null, null), + ${vectName}Vector.class); + writer = new 
PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + <#if minor.class?starts_with("Decimal")> + ((PromotableWriter)writer).getWriter(MinorType.${upperName}<#if minor.class?starts_with("Decimal")>, new ${minor.arrowType}(precision, scale, ${vectName}Vector.TYPE_WIDTH * 8)); + <#elseif is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + <#if minor.arrowTypeConstructorParams??> + <#assign constructorParams = minor.arrowTypeConstructorParams /> + <#else> + <#assign constructorParams = [] /> + <#list minor.typeParams?reverse as typeParam> + <#assign constructorParams = constructorParams + [ typeParam.name ] /> + + + ArrowType arrowType = new ${minor.arrowType}(${constructorParams?join(", ")}); + ((PromotableWriter)writer).getWriter(MinorType.${upperName}, arrowType); + <#else> + ((PromotableWriter)writer).getWriter(MinorType.${upperName}); + + } + } + return writer; + } + + + +} + diff --git a/java/vector/target/codegen/templates/UnionFixedSizeListWriter.java b/java/vector/target/codegen/templates/UnionFixedSizeListWriter.java new file mode 100644 index 000000000000..3436e3a96765 --- /dev/null +++ b/java/vector/target/codegen/templates/UnionFixedSizeListWriter.java @@ -0,0 +1,375 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.vector.complex.writer.Decimal256Writer; +import org.apache.arrow.vector.complex.writer.DecimalWriter; +import org.apache.arrow.vector.holders.Decimal256Holder; +import org.apache.arrow.vector.holders.DecimalHolder; + + +import java.lang.UnsupportedOperationException; +import java.math.BigDecimal; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/UnionFixedSizeListWriter.java" /> + + +<#include "/@includes/license.ftl" /> + + package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/* + * This class is generated using freemarker and the ${.template_name} template. 
+ */ + +@SuppressWarnings("unused") +public class UnionFixedSizeListWriter extends AbstractFieldWriter { + + protected FixedSizeListVector vector; + protected PromotableWriter writer; + private boolean inStruct = false; + private String structName; + private final int listSize; + + public UnionFixedSizeListWriter(FixedSizeListVector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public UnionFixedSizeListWriter(FixedSizeListVector vector, NullableStructWriterFactory nullableStructWriterFactory) { + this.vector = vector; + this.writer = new PromotableWriter(vector.getDataVector(), vector, nullableStructWriterFactory); + this.listSize = vector.getListSize(); + } + + public UnionFixedSizeListWriter(FixedSizeListVector vector, AbstractFieldWriter parent) { + this(vector); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + public Field getField() { + return vector.getField(); + } + + public void setValueCount(int count) { + vector.setValueCount(count); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void close() throws Exception { + vector.close(); + writer.close(); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + } + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if uncappedName == "int" ><#assign uncappedName = "integer" /> + <#if !minor.typeParams?? 
> + + @Override + public ${name}Writer ${uncappedName}() { + return this; + } + + @Override + public ${name}Writer ${uncappedName}(String name) { + structName = name; + return writer.${uncappedName}(name); + } + + + + @Override + public DecimalWriter decimal() { + return this; + } + + @Override + public DecimalWriter decimal(String name, int scale, int precision) { + return writer.decimal(name, scale, precision); + } + + @Override + public DecimalWriter decimal(String name) { + return writer.decimal(name); + } + + + @Override + public Decimal256Writer decimal256() { + return this; + } + + @Override + public Decimal256Writer decimal256(String name, int scale, int precision) { + return writer.decimal256(name, scale, precision); + } + + @Override + public Decimal256Writer decimal256(String name) { + return writer.decimal256(name); + } + + @Override + public StructWriter struct() { + inStruct = true; + return this; + } + + @Override + public ListWriter list() { + return writer; + } + + @Override + public ListWriter list(String name) { + ListWriter listWriter = writer.list(name); + return listWriter; + } + + @Override + public StructWriter struct(String name) { + StructWriter structWriter = writer.struct(name); + return structWriter; + } + + @Override + public MapWriter map() { + return writer; + } + + @Override + public MapWriter map(String name) { + MapWriter mapWriter = writer.map(name); + return mapWriter; + } + + @Override + public MapWriter map(boolean keysSorted) { + writer.map(keysSorted); + return writer; + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + MapWriter mapWriter = writer.map(name, keysSorted); + return mapWriter; + } + + @Override + public void startList() { + int start = vector.startNewValue(idx()); + writer.setPosition(start); + } + + @Override + public void endList() { + setPosition(idx() + 1); + } + + @Override + public void start() { + writer.start(); + } + + @Override + public void end() { + writer.end(); + inStruct 
= false; + } + + @Override + public void write(DecimalHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write(holder); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write(Decimal256Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write(holder); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeNull() { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeNull(); + } + + public void writeDecimal(long start, ArrowBuf buffer, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal(start, buffer, arrowType); + writer.setPosition(writer.idx() + 1); + } + + public void writeDecimal(BigDecimal value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal(value); + writer.setPosition(writer.idx() + 1); + } + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeBigEndianBytesToDecimal(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + + public void writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new 
IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal256(start, buffer, arrowType); + writer.setPosition(writer.idx() + 1); + } + + public void writeDecimal256(BigDecimal value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal256(value); + writer.setPosition(writer.idx() + 1); + } + + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeBigEndianBytesToDecimal256(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); 
+ } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(Text value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(String value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + + <#if !minor.typeParams?? 
> + @Override + public void write${name}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); + writer.setPosition(writer.idx() + 1); + } + + public void write(${name}Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write${name}(<#list fields as field>holder.${field.name}<#if field_has_next>, ); + writer.setPosition(writer.idx() + 1); + } + + + + +} diff --git a/java/vector/target/codegen/templates/UnionListWriter.java b/java/vector/target/codegen/templates/UnionListWriter.java new file mode 100644 index 000000000000..5c0565ee2717 --- /dev/null +++ b/java/vector/target/codegen/templates/UnionListWriter.java @@ -0,0 +1,319 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.vector.complex.writer.Decimal256Writer; +import org.apache.arrow.vector.complex.writer.DecimalWriter; +import org.apache.arrow.vector.holders.Decimal256Holder; +import org.apache.arrow.vector.holders.DecimalHolder; + + +import java.lang.UnsupportedOperationException; +import java.math.BigDecimal; + +<@pp.dropOutputFile /> +<#list ["List", "LargeList"] as listName> + +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/Union${listName}Writer.java" /> + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +import static org.apache.arrow.memory.util.LargeMemoryUtil.checkedCastToInt; +<#include "/@includes/vv_imports.ftl" /> + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ + +@SuppressWarnings("unused") +public class Union${listName}Writer extends AbstractFieldWriter { + + protected ${listName}Vector vector; + protected PromotableWriter writer; + private boolean inStruct = false; + private boolean listStarted = false; + private String structName; + <#if listName == "LargeList"> + private static final long OFFSET_WIDTH = 8; + <#else> + private static final int OFFSET_WIDTH = 4; + + + public Union${listName}Writer(${listName}Vector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public Union${listName}Writer(${listName}Vector vector, NullableStructWriterFactory nullableStructWriterFactory) { + this.vector = vector; + this.writer = new PromotableWriter(vector.getDataVector(), vector, nullableStructWriterFactory); + } + + public Union${listName}Writer(${listName}Vector vector, AbstractFieldWriter parent) { + this(vector); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void clear() { + vector.clear(); + 
} + + @Override + public Field getField() { + return vector.getField(); + } + + public void setValueCount(int count) { + vector.setValueCount(count); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void close() throws Exception { + vector.close(); + writer.close(); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + } + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#assign vectName = capName /> + @Override + public ${minor.class}Writer ${lowerName}() { + return this; + } + + <#if minor.typeParams?? > + @Override + public ${minor.class}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + return writer.${lowerName}(name<#list minor.typeParams as typeParam>, ${typeParam.name}); + } + + + @Override + public ${minor.class}Writer ${lowerName}(String name) { + structName = name; + return writer.${lowerName}(name); + } + + + + @Override + public StructWriter struct() { + inStruct = true; + return this; + } + + @Override + public ListWriter list() { + return writer; + } + + @Override + public ListWriter list(String name) { + ListWriter listWriter = writer.list(name); + return listWriter; + } + + @Override + public StructWriter struct(String name) { + StructWriter structWriter = writer.struct(name); + return structWriter; + } + + @Override + public MapWriter map() { + return writer; + } + + @Override + public MapWriter map(String name) { + MapWriter mapWriter = writer.map(name); + return mapWriter; + } + + @Override + public MapWriter map(boolean keysSorted) { + writer.map(keysSorted); + return writer; + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + MapWriter mapWriter = 
writer.map(name, keysSorted); + return mapWriter; + } + + <#if listName == "LargeList"> + @Override + public void startList() { + vector.startNewValue(idx()); + writer.setPosition(checkedCastToInt(vector.getOffsetBuffer().getLong((idx() + 1L) * OFFSET_WIDTH))); + listStarted = true; + } + + @Override + public void endList() { + vector.getOffsetBuffer().setLong((idx() + 1L) * OFFSET_WIDTH, writer.idx()); + setPosition(idx() + 1); + listStarted = false; + } + <#else> + @Override + public void startList() { + vector.startNewValue(idx()); + writer.setPosition(vector.getOffsetBuffer().getInt((idx() + 1L) * OFFSET_WIDTH)); + listStarted = true; + } + + @Override + public void endList() { + vector.getOffsetBuffer().setInt((idx() + 1L) * OFFSET_WIDTH, writer.idx()); + setPosition(idx() + 1); + listStarted = false; + } + + + @Override + public void start() { + writer.start(); + } + + @Override + public void end() { + writer.end(); + inStruct = false; + } + + @Override + public void writeNull() { + if (!listStarted){ + vector.setNull(idx()); + } else { + writer.writeNull(); + } + } + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + @Override + public void write${name}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, ) { + writer.write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); + writer.setPosition(writer.idx()+1); + } + + <#if is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + @Override + public void write(${name}Holder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + <#elseif minor.class?starts_with("Decimal")> + public void write${name}(long start, ArrowBuf buffer, ArrowType arrowType) { + writer.write${name}(start, buffer, arrowType); + writer.setPosition(writer.idx()+1); + } + + @Override + public void 
write(${name}Holder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + public void write${name}(BigDecimal value) { + writer.write${name}(value); + writer.setPosition(writer.idx()+1); + } + + public void writeBigEndianBytesTo${name}(byte[] value, ArrowType arrowType){ + writer.writeBigEndianBytesTo${name}(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + <#else> + @Override + public void write(${name}Holder holder) { + writer.write${name}(<#list fields as field>holder.${field.name}<#if field_has_next>, ); + writer.setPosition(writer.idx()+1); + } + + + <#if minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + writer.write${minor.class}(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(Text value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + public void write${minor.class}(String value) { + writer.write${minor.class}(value); + writer.setPosition(writer.idx() + 1); + } + + + + +} + diff --git a/java/vector/target/codegen/templates/UnionMapWriter.java b/java/vector/target/codegen/templates/UnionMapWriter.java new file mode 100644 index 000000000000..606f880377be --- /dev/null +++ b/java/vector/target/codegen/templates/UnionMapWriter.java @@ -0,0 +1,222 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * 
contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.vector.complex.writer.Decimal256Writer; +import org.apache.arrow.vector.complex.writer.DecimalWriter; +import org.apache.arrow.vector.holders.Decimal256Holder; +import org.apache.arrow.vector.holders.DecimalHolder; + +import java.lang.UnsupportedOperationException; +import java.math.BigDecimal; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/UnionMapWriter.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ + +/** + *

Writer for MapVectors. This extends UnionListWriter to simplify writing map entries to a list + * of struct elements, with "key" and "value" fields. The procedure for writing a map begin with + * {@link #startMap()} followed by {@link #startEntry()}. An entry is written by using the + * {@link #key()} writer to write the key, then the {@link #value()} writer to write a value. After + * writing the value, call {@link #endEntry()} to complete the entry. Each map can have 1 or more + * entries. When done writing entries, call {@link #endMap()} to complete the map. + * + *

NOTE: the MapVector can have NULL values by not writing to position. If a map is started with + * {@link #startMap()}, then it must have a key written. The value of a map entry can be NULL by + * not using the {@link #value()} writer. + * + *

Example to write the following map to position 5 of a vector + *

{@code
+ *   // {
+ *   //   1 -> 3,
+ *   //   2 -> 4,
+ *   //   3 -> NULL
+ *   // }
+ *
+ *   UnionMapWriter writer = ...
+ *
+ *   writer.setPosition(5);
+ *   writer.startMap();
+ *   writer.startEntry();
+ *   writer.key().integer().writeInt(1);
+ *   writer.value().integer().writeInt(3);
+ *   writer.endEntry();
+ *   writer.startEntry();
+ *   writer.key().integer().writeInt(2);
+ *   writer.value().integer().writeInt(4);
+ *   writer.endEntry();
+ *   writer.startEntry();
+ *   writer.key().integer().writeInt(3);
+ *   writer.endEntry();
+ *   writer.endMap();
+ * 
+ *

+ */ +@SuppressWarnings("unused") +public class UnionMapWriter extends UnionListWriter { + + /** + * Current mode for writing map entries, set by calling {@link #key()} or {@link #value()} + * and reset with a call to {@link #endEntry()}. With KEY mode, a struct writer with field + * named "key" is returned. With VALUE mode, a struct writer with field named "value" is + * returned. In OFF mode, the writer will behave like a standard UnionListWriter + */ + private enum MapWriteMode { + OFF, + KEY, + VALUE, + } + + private MapWriteMode mode = MapWriteMode.OFF; + private StructWriter entryWriter; + + public UnionMapWriter(MapVector vector) { + super(vector); + entryWriter = struct(); + } + + /** Start writing a map that consists of 1 or more entries. */ + public void startMap() { + startList(); + } + + /** Complete the map. */ + public void endMap() { + endList(); + } + + /** + * Start a map entry that should be followed by calls to {@link #key()} and {@link #value()} + * writers. Call {@link #endEntry()} to complete the entry. + */ + public void startEntry() { + writer.setAddVectorAsNullable(false); + entryWriter.start(); + } + + /** Complete the map entry. */ + public void endEntry() { + entryWriter.end(); + mode = MapWriteMode.OFF; + writer.setAddVectorAsNullable(true); + } + + /** Return the key writer that is used to write to the "key" field. */ + public UnionMapWriter key() { + writer.setAddVectorAsNullable(false); + mode = MapWriteMode.KEY; + return this; + } + + /** Return the value writer that is used to write to the "value" field. */ + public UnionMapWriter value() { + writer.setAddVectorAsNullable(true); + mode = MapWriteMode.VALUE; + return this; + } + + <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if uncappedName == "int" ><#assign uncappedName = "integer" /> + <#if !minor.typeParams?? 
> + @Override + public ${name}Writer ${uncappedName}() { + switch (mode) { + case KEY: + return entryWriter.${uncappedName}(MapVector.KEY_NAME); + case VALUE: + return entryWriter.${uncappedName}(MapVector.VALUE_NAME); + default: + return this; + } + } + + + + @Override + public DecimalWriter decimal() { + switch (mode) { + case KEY: + return entryWriter.decimal(MapVector.KEY_NAME); + case VALUE: + return entryWriter.decimal(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public Decimal256Writer decimal256() { + switch (mode) { + case KEY: + return entryWriter.decimal256(MapVector.KEY_NAME); + case VALUE: + return entryWriter.decimal256(MapVector.VALUE_NAME); + default: + return this; + } + } + + + @Override + public StructWriter struct() { + switch (mode) { + case KEY: + return entryWriter.struct(MapVector.KEY_NAME); + case VALUE: + return entryWriter.struct(MapVector.VALUE_NAME); + default: + return super.struct(); + } + } + + @Override + public ListWriter list() { + switch (mode) { + case KEY: + return entryWriter.list(MapVector.KEY_NAME); + case VALUE: + return entryWriter.list(MapVector.VALUE_NAME); + default: + return super.list(); + } + } + + @Override + public MapWriter map(boolean keysSorted) { + switch (mode) { + case KEY: + return entryWriter.map(MapVector.KEY_NAME, keysSorted); + case VALUE: + return entryWriter.map(MapVector.VALUE_NAME, keysSorted); + default: + return super.map(); + } + } +} diff --git a/java/vector/target/codegen/templates/UnionReader.java b/java/vector/target/codegen/templates/UnionReader.java new file mode 100644 index 000000000000..822d4822987f --- /dev/null +++ b/java/vector/target/codegen/templates/UnionReader.java @@ -0,0 +1,230 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +import org.apache.arrow.vector.types.Types.MinorType; +import org.apache.arrow.vector.types.pojo.Field; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/UnionReader.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +@SuppressWarnings("unused") +public class UnionReader extends AbstractFieldReader { + + private static final int NUM_SUPPORTED_TYPES = 46; + + private BaseReader[] readers = new BaseReader[NUM_SUPPORTED_TYPES]; + public UnionVector data; + + public UnionReader(UnionVector data) { + this.data = data; + } + + public MinorType getMinorType() { + return TYPES[data.getTypeValue(idx())]; + } + + private static MinorType[] TYPES = new MinorType[NUM_SUPPORTED_TYPES]; + + static { + for (MinorType minorType : MinorType.values()) { + TYPES[minorType.ordinal()] = minorType; + } + } + + @Override + public Field getField() { + return data.getField(); + } + + public boolean isSet(){ + return !data.isNull(idx()); + } + + public void read(UnionHolder holder) { + holder.reader = this; + holder.isSet = this.isSet() ? 
1 : 0; + } + + public void read(int index, UnionHolder holder) { + getList().read(index, holder); + } + + private FieldReader getReaderForIndex(int index) { + int typeValue = data.getTypeValue(index); + FieldReader reader = (FieldReader) readers[typeValue]; + if (reader != null) { + return reader; + } + switch (MinorType.values()[typeValue]) { + case NULL: + return NullReader.INSTANCE; + case STRUCT: + return (FieldReader) getStruct(); + case LIST: + return (FieldReader) getList(); + case MAP: + return (FieldReader) getMap(); + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + case ${name?upper_case}: + return (FieldReader) get${name}(); + + + + default: + throw new UnsupportedOperationException("Unsupported type: " + MinorType.values()[typeValue]); + } + } + + private SingleStructReaderImpl structReader; + + private StructReader getStruct() { + if (structReader == null) { + structReader = (SingleStructReaderImpl) data.getStruct().getReader(); + structReader.setPosition(idx()); + readers[MinorType.STRUCT.ordinal()] = structReader; + } + return structReader; + } + + private UnionListReader listReader; + + private FieldReader getList() { + if (listReader == null) { + listReader = new UnionListReader(data.getList()); + listReader.setPosition(idx()); + readers[MinorType.LIST.ordinal()] = listReader; + } + return listReader; + } + + private UnionMapReader mapReader; + + private FieldReader getMap() { + if (mapReader == null) { + mapReader = new UnionMapReader(data.getMap()); + mapReader.setPosition(idx()); + readers[MinorType.MAP.ordinal()] = mapReader; + } + return mapReader; + } + + @Override + public java.util.Iterator iterator() { + return getStruct().iterator(); + } + + @Override + public void 
copyAsValue(UnionWriter writer) { + writer.data.copyFrom(idx(), writer.idx(), data); + } + + <#list ["Object", "BigDecimal", "Short", "Integer", "Long", "Boolean", + "LocalDateTime", "Duration", "Period", "Double", "Float", + "Character", "Text", "Byte", "byte[]", "PeriodDuration"] as friendlyType> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + + @Override + public ${friendlyType} read${safeType}() { + return getReaderForIndex(idx()).read${safeType}(); + } + + + + public int size() { + return getReaderForIndex(idx()).size(); + } + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign uncappedName = name?uncap_first/> + <#assign boxedType = (minor.boxedType!type.boxedType) /> + <#assign javaType = (minor.javaType!type.javaType) /> + <#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> + <#assign safeType=friendlyType /> + <#if safeType=="byte[]"><#assign safeType="ByteArray" /> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + + private ${name}ReaderImpl ${uncappedName}Reader; + + private ${name}ReaderImpl get${name}() { + if (${uncappedName}Reader == null) { + ${uncappedName}Reader = new ${name}ReaderImpl(data.get${name}Vector()); + ${uncappedName}Reader.setPosition(idx()); + readers[MinorType.${name?upper_case}.ordinal()] = ${uncappedName}Reader; + } + return ${uncappedName}Reader; + } + + public void read(Nullable${name}Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(${name}Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + + + + @Override + public void copyAsValue(ListWriter writer) { + ComplexCopier.copy(this, (FieldWriter) writer); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + for (BaseReader reader : readers) { + if (reader != null) { + reader.setPosition(index); + } + } + } + + public FieldReader reader(String name){ + return getStruct().reader(name); + } + + public FieldReader reader() { + return getList().reader(); + } + + public boolean next() { + return getReaderForIndex(idx()).next(); + } +} diff --git a/java/vector/target/codegen/templates/UnionVector.java b/java/vector/target/codegen/templates/UnionVector.java new file mode 100644 index 000000000000..ea79c5c2fba7 --- /dev/null +++ b/java/vector/target/codegen/templates/UnionVector.java @@ -0,0 +1,911 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.ReferenceManager; +import org.apache.arrow.memory.util.CommonUtil; +import org.apache.arrow.memory.util.hash.ArrowBufHasher; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.BaseValueVector; +import org.apache.arrow.vector.BitVectorHelper; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.ValueVector; +import org.apache.arrow.vector.complex.AbstractStructVector; +import org.apache.arrow.vector.complex.NonNullableStructVector; +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.compare.VectorVisitor; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.UnionMode; +import org.apache.arrow.vector.compare.RangeEqualsVisitor; +import org.apache.arrow.vector.types.pojo.ArrowType; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.util.CallBack; +import org.apache.arrow.vector.util.DataSizeRoundingUtil; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/UnionVector.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex; + +<#include "/@includes/vv_imports.ftl" /> +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.memory.util.CommonUtil; +import 
org.apache.arrow.vector.compare.VectorVisitor; +import org.apache.arrow.vector.complex.impl.ComplexCopier; +import org.apache.arrow.vector.util.CallBack; +import org.apache.arrow.vector.util.ValueVectorUtility; +import org.apache.arrow.vector.ipc.message.ArrowFieldNode; +import org.apache.arrow.memory.util.ArrowBufPointer; +import org.apache.arrow.memory.util.hash.ArrowBufHasher; +import org.apache.arrow.vector.BaseValueVector; +import org.apache.arrow.vector.util.OversizedAllocationException; +import org.apache.arrow.util.Preconditions; + +import static org.apache.arrow.vector.types.UnionMode.Sparse; +import static org.apache.arrow.memory.util.LargeMemoryUtil.checkedCastToInt; +import static org.apache.arrow.memory.util.LargeMemoryUtil.capAtMaxInt; + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ +@SuppressWarnings("unused") + + +/** + * A vector which can hold values of different types. It does so by using a StructVector which contains a vector for each + * primitive type that is stored. StructVector is used in order to take advantage of its serialization/deserialization methods, + * as well as the addOrGet method. + * + * For performance reasons, UnionVector stores a cached reference to each subtype vector, to avoid having to do the struct lookup + * each time the vector is accessed. 
+ * Source code generated using FreeMarker template ${.template_name} + */ +public class UnionVector extends AbstractContainerVector implements FieldVector { + int valueCount; + + NonNullableStructVector internalStruct; + protected ArrowBuf typeBuffer; + + private StructVector structVector; + private ListVector listVector; + private MapVector mapVector; + + private FieldReader reader; + + private int singleType = 0; + private ValueVector singleVector; + + private int typeBufferAllocationSizeInBytes; + + private final FieldType fieldType; + private final Field[] typeIds = new Field[Byte.MAX_VALUE + 1]; + + public static final byte TYPE_WIDTH = 1; + private static final FieldType INTERNAL_STRUCT_TYPE = new FieldType(false /*nullable*/, + ArrowType.Struct.INSTANCE, null /*dictionary*/, null /*metadata*/); + + public static UnionVector empty(String name, BufferAllocator allocator) { + FieldType fieldType = FieldType.nullable(new ArrowType.Union( + UnionMode.Sparse, null)); + return new UnionVector(name, allocator, fieldType, null); + } + + public UnionVector(String name, BufferAllocator allocator, FieldType fieldType, CallBack callBack) { + super(name, allocator, callBack); + this.fieldType = fieldType; + this.internalStruct = new NonNullableStructVector( + "internal", + allocator, + INTERNAL_STRUCT_TYPE, + callBack, + AbstractStructVector.ConflictPolicy.CONFLICT_REPLACE, + false); + this.typeBuffer = allocator.getEmpty(); + this.typeBufferAllocationSizeInBytes = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH; + } + + public BufferAllocator getAllocator() { + return allocator; + } + + @Override + public MinorType getMinorType() { + return MinorType.UNION; + } + + @Override + public void initializeChildrenFromFields(List children) { + int count = 0; + for (Field child: children) { + int typeId = Types.getMinorTypeForArrowType(child.getType()).ordinal(); + if (this.fieldType != null) { + int[] typeIds = ((ArrowType.Union)this.fieldType.getType()).getTypeIds(); + 
if (typeIds != null) { + typeId = typeIds[count++]; + } + } + typeIds[typeId] = child; + } + internalStruct.initializeChildrenFromFields(children); + } + + @Override + public List getChildrenFromFields() { + return internalStruct.getChildrenFromFields(); + } + + @Override + public void loadFieldBuffers(ArrowFieldNode fieldNode, List ownBuffers) { + if (ownBuffers.size() != 1) { + throw new IllegalArgumentException("Illegal buffer count, expected 1, got: " + ownBuffers.size()); + } + ArrowBuf buffer = ownBuffers.get(0); + typeBuffer.getReferenceManager().release(); + typeBuffer = buffer.getReferenceManager().retain(buffer, allocator); + typeBufferAllocationSizeInBytes = checkedCastToInt(typeBuffer.capacity()); + this.valueCount = fieldNode.getLength(); + } + + @Override + public List getFieldBuffers() { + List result = new ArrayList<>(1); + setReaderAndWriterIndex(); + result.add(typeBuffer); + + return result; + } + + private void setReaderAndWriterIndex() { + typeBuffer.readerIndex(0); + typeBuffer.writerIndex(valueCount * TYPE_WIDTH); + } + + /** + * Get the inner vectors. + * + * @deprecated This API will be removed as the current implementations no longer support inner vectors. + * + * @return the inner vectors for this field as defined by the TypeLayout + */ + @Deprecated + @Override + public List getFieldInnerVectors() { + throw new UnsupportedOperationException("There are no inner vectors. Use geFieldBuffers"); + } + + private String fieldName(MinorType type) { + return type.name().toLowerCase(); + } + + private FieldType fieldType(MinorType type) { + return FieldType.nullable(type.getType()); + } + + private T addOrGet(Types.MinorType minorType, Class c) { + return addOrGet(null, minorType, c); + } + + private T addOrGet(String name, Types.MinorType minorType, ArrowType arrowType, Class c) { + return internalStruct.addOrGet(name == null ? 
fieldName(minorType) : name, FieldType.nullable(arrowType), c); + } + + private T addOrGet(String name, Types.MinorType minorType, Class c) { + return internalStruct.addOrGet(name == null ? fieldName(minorType) : name, fieldType(minorType), c); + } + + + @Override + public long getValidityBufferAddress() { + throw new UnsupportedOperationException(); + } + + public long getTypeBufferAddress() { + return typeBuffer.memoryAddress(); + } + + @Override + public long getDataBufferAddress() { + throw new UnsupportedOperationException(); + } + + @Override + public long getOffsetBufferAddress() { + throw new UnsupportedOperationException(); + } + + public ArrowBuf getTypeBuffer() { + return typeBuffer; + } + + @Override + public ArrowBuf getValidityBuffer() { throw new UnsupportedOperationException(); } + + @Override + public ArrowBuf getDataBuffer() { throw new UnsupportedOperationException(); } + + @Override + public ArrowBuf getOffsetBuffer() { throw new UnsupportedOperationException(); } + + public StructVector getStruct() { + if (structVector == null) { + int vectorCount = internalStruct.size(); + structVector = addOrGet(MinorType.STRUCT, StructVector.class); + if (internalStruct.size() > vectorCount) { + structVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return structVector; + } + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#assign lowerCaseName = name?lower_case/> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + + private ${name}Vector ${uncappedName}Vector; + + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + public ${name}Vector get${name}Vector() { + if (${uncappedName}Vector == null) { + throw new IllegalArgumentException("No ${name} present. Provide ArrowType argument to create a new vector"); + } + return ${uncappedName}Vector; + } + public ${name}Vector get${name}Vector(ArrowType arrowType) { + return get${name}Vector(null, arrowType); + } + public ${name}Vector get${name}Vector(String name, ArrowType arrowType) { + if (${uncappedName}Vector == null) { + int vectorCount = internalStruct.size(); + ${uncappedName}Vector = addOrGet(name, MinorType.${name?upper_case}, arrowType, ${name}Vector.class); + if (internalStruct.size() > vectorCount) { + ${uncappedName}Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return ${uncappedName}Vector; + } + <#else> + public ${name}Vector get${name}Vector() { + return get${name}Vector(null); + } + + public ${name}Vector get${name}Vector(String name) { + if (${uncappedName}Vector == null) { + int vectorCount = internalStruct.size(); + ${uncappedName}Vector = addOrGet(name, MinorType.${name?upper_case}, ${name}Vector.class); + if (internalStruct.size() > vectorCount) { + ${uncappedName}Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return ${uncappedName}Vector; + } + + + + + + public ListVector getList() { + if (listVector == null) { + int vectorCount = internalStruct.size(); + listVector = addOrGet(MinorType.LIST, ListVector.class); + if (internalStruct.size() > vectorCount) { + listVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return listVector; + } + + public MapVector getMap() { + if (mapVector == 
null) { + throw new IllegalArgumentException("No map present. Provide ArrowType argument to create a new vector"); + } + return mapVector; + } + + public MapVector getMap(ArrowType arrowType) { + return getMap(null, arrowType); + } + + public MapVector getMap(String name, ArrowType arrowType) { + if (mapVector == null) { + int vectorCount = internalStruct.size(); + mapVector = addOrGet(name, MinorType.MAP, arrowType, MapVector.class); + if (internalStruct.size() > vectorCount) { + mapVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return mapVector; + } + + public int getTypeValue(int index) { + return typeBuffer.getByte(index * TYPE_WIDTH); + } + + @Override + public void allocateNew() throws OutOfMemoryException { + /* new allocation -- clear the current buffers */ + clear(); + internalStruct.allocateNew(); + try { + allocateTypeBuffer(); + } catch (Exception e) { + clear(); + throw e; + } + } + + @Override + public boolean allocateNewSafe() { + /* new allocation -- clear the current buffers */ + clear(); + boolean safe = internalStruct.allocateNewSafe(); + if (!safe) { return false; } + try { + allocateTypeBuffer(); + } catch (Exception e) { + clear(); + return false; + } + + return true; + } + + private void allocateTypeBuffer() { + typeBuffer = allocator.buffer(typeBufferAllocationSizeInBytes); + typeBuffer.readerIndex(0); + typeBuffer.setZero(0, typeBuffer.capacity()); + } + + @Override + public void reAlloc() { + internalStruct.reAlloc(); + reallocTypeBuffer(); + } + + private void reallocTypeBuffer() { + final long currentBufferCapacity = typeBuffer.capacity(); + long newAllocationSize = currentBufferCapacity * 2; + if (newAllocationSize == 0) { + if (typeBufferAllocationSizeInBytes > 0) { + newAllocationSize = typeBufferAllocationSizeInBytes; + } else { + newAllocationSize = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH * 2; + } + } + newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize); + assert 
newAllocationSize >= 1; + + if (newAllocationSize > BaseValueVector.MAX_ALLOCATION_SIZE) { + throw new OversizedAllocationException("Unable to expand the buffer"); + } + + final ArrowBuf newBuf = allocator.buffer(checkedCastToInt(newAllocationSize)); + newBuf.setBytes(0, typeBuffer, 0, currentBufferCapacity); + newBuf.setZero(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity); + typeBuffer.getReferenceManager().release(1); + typeBuffer = newBuf; + typeBufferAllocationSizeInBytes = (int)newAllocationSize; + } + + @Override + public void setInitialCapacity(int numRecords) { } + + @Override + public int getValueCapacity() { + return Math.min(getTypeBufferValueCapacity(), internalStruct.getValueCapacity()); + } + + @Override + public void close() { + clear(); + } + + @Override + public void clear() { + valueCount = 0; + typeBuffer.getReferenceManager().release(); + typeBuffer = allocator.getEmpty(); + internalStruct.clear(); + } + + @Override + public void reset() { + valueCount = 0; + typeBuffer.setZero(0, typeBuffer.capacity()); + internalStruct.reset(); + } + + @Override + public Field getField() { + List childFields = new ArrayList<>(); + List children = internalStruct.getChildren(); + int[] typeIds = new int[children.size()]; + for (ValueVector v : children) { + typeIds[childFields.size()] = v.getMinorType().ordinal(); + childFields.add(v.getField()); + } + + FieldType fieldType; + if (this.fieldType == null) { + fieldType = FieldType.nullable(new ArrowType.Union(Sparse, typeIds)); + } else { + final UnionMode mode = ((ArrowType.Union)this.fieldType.getType()).getMode(); + fieldType = new FieldType(this.fieldType.isNullable(), new ArrowType.Union(mode, typeIds), + this.fieldType.getDictionary(), this.fieldType.getMetadata()); + } + + return new Field(name, fieldType, childFields); + } + + @Override + public TransferPair getTransferPair(BufferAllocator allocator) { + return getTransferPair(name, allocator); + } + + @Override + public TransferPair 
getTransferPair(String ref, BufferAllocator allocator) { + return getTransferPair(ref, allocator, null); + } + + @Override + public TransferPair getTransferPair(String ref, BufferAllocator allocator, CallBack callBack) { + return new org.apache.arrow.vector.complex.UnionVector.TransferImpl(ref, allocator, callBack); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator) { + return getTransferPair(field, allocator, null); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator, CallBack callBack) { + return new org.apache.arrow.vector.complex.UnionVector.TransferImpl(field, allocator, callBack); + } + + @Override + public TransferPair makeTransferPair(ValueVector target) { + return new TransferImpl((UnionVector) target); + } + + @Override + public void copyFrom(int inIndex, int outIndex, ValueVector from) { + Preconditions.checkArgument(this.getMinorType() == from.getMinorType()); + UnionVector fromCast = (UnionVector) from; + fromCast.getReader().setPosition(inIndex); + getWriter().setPosition(outIndex); + ComplexCopier.copy(fromCast.reader, writer); + } + + @Override + public void copyFromSafe(int inIndex, int outIndex, ValueVector from) { + copyFrom(inIndex, outIndex, from); + } + + public FieldVector addVector(FieldVector v) { + final String name = v.getName().isEmpty() ? fieldName(v.getMinorType()) : v.getName(); + Preconditions.checkState(internalStruct.getChild(name) == null, String.format("%s vector already exists", name)); + final FieldVector newVector = internalStruct.addOrGet(name, v.getField().getFieldType(), v.getClass()); + v.makeTransferPair(newVector).transfer(); + internalStruct.putChild(name, newVector); + if (callBack != null) { + callBack.doWork(); + } + return newVector; + } + + /** + * Directly put a vector to internalStruct without creating a new one with same type. 
+ */ + public void directAddVector(FieldVector v) { + String name = fieldName(v.getMinorType()); + Preconditions.checkState(internalStruct.getChild(name) == null, String.format("%s vector already exists", name)); + internalStruct.putChild(name, v); + if (callBack != null) { + callBack.doWork(); + } + } + + private class TransferImpl implements TransferPair { + private final TransferPair internalStructVectorTransferPair; + private final UnionVector to; + + public TransferImpl(String name, BufferAllocator allocator, CallBack callBack) { + to = new UnionVector(name, allocator, /* field type */ null, callBack); + internalStructVectorTransferPair = internalStruct.makeTransferPair(to.internalStruct); + } + + public TransferImpl(Field field, BufferAllocator allocator, CallBack callBack) { + to = new UnionVector(field.getName(), allocator, null, callBack); + internalStructVectorTransferPair = internalStruct.makeTransferPair(to.internalStruct); + } + + public TransferImpl(UnionVector to) { + this.to = to; + internalStructVectorTransferPair = internalStruct.makeTransferPair(to.internalStruct); + } + + @Override + public void transfer() { + to.clear(); + ReferenceManager refManager = typeBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(typeBuffer, to.allocator).getTransferredBuffer(); + internalStructVectorTransferPair.transfer(); + to.valueCount = valueCount; + clear(); + } + + @Override + public void splitAndTransfer(int startIndex, int length) { + Preconditions.checkArgument(startIndex >= 0 && length >= 0 && startIndex + length <= valueCount, + "Invalid parameters startIndex: %s, length: %s for valueCount: %s", startIndex, length, valueCount); + to.clear(); + + internalStructVectorTransferPair.splitAndTransfer(startIndex, length); + final int startPoint = startIndex * TYPE_WIDTH; + final int sliceLength = length * TYPE_WIDTH; + final ArrowBuf slicedBuffer = typeBuffer.slice(startPoint, sliceLength); + final ReferenceManager refManager = 
slicedBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(slicedBuffer, to.allocator).getTransferredBuffer(); + to.setValueCount(length); + } + + @Override + public ValueVector getTo() { + return to; + } + + @Override + public void copyValueSafe(int from, int to) { + this.to.copyFrom(from, to, UnionVector.this); + } + } + + @Override + public FieldReader getReader() { + if (reader == null) { + reader = new UnionReader(this); + } + return reader; + } + + public FieldWriter getWriter() { + if (writer == null) { + writer = new UnionWriter(this); + } + return writer; + } + + @Override + public int getBufferSize() { + if (valueCount == 0) { return 0; } + + return (valueCount * TYPE_WIDTH) + internalStruct.getBufferSize(); + } + + @Override + public int getBufferSizeFor(final int valueCount) { + if (valueCount == 0) { + return 0; + } + + long bufferSize = 0; + for (final ValueVector v : (Iterable) this) { + bufferSize += v.getBufferSizeFor(valueCount); + } + + return (int) bufferSize + (valueCount * TYPE_WIDTH); + } + + @Override + public ArrowBuf[] getBuffers(boolean clear) { + List list = new java.util.ArrayList<>(); + setReaderAndWriterIndex(); + if (getBufferSize() != 0) { + list.add(typeBuffer); + list.addAll(java.util.Arrays.asList(internalStruct.getBuffers(clear))); + } + if (clear) { + valueCount = 0; + typeBuffer.getReferenceManager().retain(); + typeBuffer.getReferenceManager().release(); + typeBuffer = allocator.getEmpty(); + } + return list.toArray(new ArrowBuf[list.size()]); + } + + @Override + public Iterator iterator() { + return internalStruct.iterator(); + } + + public ValueVector getVector(int index) { + return getVector(index, null); + } + + public ValueVector getVector(int index, ArrowType arrowType) { + int type = typeBuffer.getByte(index * TYPE_WIDTH); + return getVectorByType(type, arrowType); + } + + public ValueVector getVectorByType(int typeId) { + return getVectorByType(typeId, null); + } + + public ValueVector 
getVectorByType(int typeId, ArrowType arrowType) { + Field type = typeIds[typeId]; + Types.MinorType minorType; + String name = null; + if (type == null) { + minorType = Types.MinorType.values()[typeId]; + } else { + minorType = Types.getMinorTypeForArrowType(type.getType()); + name = type.getName(); + } + switch (minorType) { + case NULL: + return null; + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + case ${name?upper_case}: + return get${name}Vector(name<#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary">, arrowType); + + + + case STRUCT: + return getStruct(); + case LIST: + return getList(); + case MAP: + return getMap(name, arrowType); + default: + throw new UnsupportedOperationException("Cannot support type: " + MinorType.values()[typeId]); + } + } + + public Object getObject(int index) { + ValueVector vector = getVector(index); + if (vector != null) { + return vector.isNull(index) ? null : vector.getObject(index); + } + return null; + } + + public byte[] get(int index) { + return null; + } + + public void get(int index, ComplexHolder holder) { + } + + public void get(int index, UnionHolder holder) { + FieldReader reader = new UnionReader(UnionVector.this); + reader.setPosition(index); + holder.reader = reader; + } + + public int getValueCount() { + return valueCount; + } + + /** + * IMPORTANT: Union types always return non null as there is no validity buffer. + * + * To check validity correctly you must check the underlying vector. 
+ */ + public boolean isNull(int index) { + return false; + } + + @Override + public int getNullCount() { + return 0; + } + + public int isSet(int index) { + return isNull(index) ? 0 : 1; + } + + UnionWriter writer; + + public void setValueCount(int valueCount) { + this.valueCount = valueCount; + while (valueCount > getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + } + internalStruct.setValueCount(valueCount); + } + + public void setSafe(int index, UnionHolder holder) { + setSafe(index, holder, null); + } + + public void setSafe(int index, UnionHolder holder, ArrowType arrowType) { + FieldReader reader = holder.reader; + if (writer == null) { + writer = new UnionWriter(UnionVector.this); + } + writer.setPosition(index); + MinorType type = reader.getMinorType(); + switch (type) { + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + case ${name?upper_case}: + Nullable${name}Holder ${uncappedName}Holder = new Nullable${name}Holder(); + reader.read(${uncappedName}Holder); + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + setSafe(index, ${uncappedName}Holder, arrowType); + <#else> + setSafe(index, ${uncappedName}Holder); + + break; + + + + case STRUCT: { + ComplexCopier.copy(reader, writer); + break; + } + case LIST: { + ComplexCopier.copy(reader, writer); + break; + } + default: + throw new UnsupportedOperationException(); + } + } + + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + public void setSafe(int index, Nullable${name}Holder holder, ArrowType arrowType) { + setType(index, MinorType.${name?upper_case}); + get${name}Vector(null, arrowType).setSafe(index, holder); + } + <#else> + public void setSafe(int index, Nullable${name}Holder holder) { + setType(index, MinorType.${name?upper_case}); + get${name}Vector(null).setSafe(index, holder); + } + + + + + + public void setType(int index, MinorType type) { + while (index >= getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + } + typeBuffer.setByte(index * TYPE_WIDTH , (byte) type.ordinal()); + } + + private int getTypeBufferValueCapacity() { + return capAtMaxInt(typeBuffer.capacity() / TYPE_WIDTH); + } + + @Override + public int hashCode(int index) { + return hashCode(index, null); + } + + @Override + public int hashCode(int index, ArrowBufHasher hasher) { + ValueVector vec = getVector(index); + if (vec == null) { + return ArrowBufPointer.NULL_HASH_CODE; + } + return vec.hashCode(index, hasher); + } + + @Override + public OUT accept(VectorVisitor visitor, IN value) { + return visitor.visit(this, value); + } + + @Override + public String getName() { + return name; + } + + @Override + public String toString() { + return ValueVectorUtility.getToString(this, 0, getValueCount()); + } + + @Override + public T addOrGet(String name, FieldType fieldType, Class clazz) { + return internalStruct.addOrGet(name, fieldType, clazz); + } + + @Override + public T getChild(String name, Class clazz) { + return internalStruct.getChild(name, clazz); + } + + @Override + public VectorWithOrdinal getChildVectorWithOrdinal(String name) { + return internalStruct.getChildVectorWithOrdinal(name); + } + + @Override + public int size() { + 
return internalStruct.size(); + } + + @Override + public void setInitialCapacity(int valueCount, double density) { + for (final ValueVector vector : internalStruct) { + if (vector instanceof DensityAwareVector) { + ((DensityAwareVector) vector).setInitialCapacity(valueCount, density); + } else { + vector.setInitialCapacity(valueCount); + } + } + } + + /** + * Set the element at the given index to null. For UnionVector, it throws an UnsupportedOperationException + * as nulls are not supported at the top level and isNull() always returns false. + * + * @param index position of element + * @throws UnsupportedOperationException whenever invoked + */ + @Override + public void setNull(int index) { + throw new UnsupportedOperationException("The method setNull() is not supported on UnionVector."); + } +} diff --git a/java/vector/target/codegen/templates/UnionWriter.java b/java/vector/target/codegen/templates/UnionWriter.java new file mode 100644 index 000000000000..08dbf24324b1 --- /dev/null +++ b/java/vector/target/codegen/templates/UnionWriter.java @@ -0,0 +1,459 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.complex.impl.NullableStructWriterFactory; +import org.apache.arrow.vector.types.Types; + +<@pp.dropOutputFile /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/complex/impl/UnionWriter.java" /> + + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.complex.impl; + +<#include "/@includes/vv_imports.ftl" /> +import org.apache.arrow.vector.complex.writer.BaseWriter; +import org.apache.arrow.vector.types.Types.MinorType; + +<#function is_timestamp_tz type> + <#return type?starts_with("TimeStamp") && type?ends_with("TZ")> + + + +/* + * This class is generated using freemarker and the ${.template_name} template. + */ +@SuppressWarnings("unused") +public class UnionWriter extends AbstractFieldWriter implements FieldWriter { + + UnionVector data; + private StructWriter structWriter; + private UnionListWriter listWriter; + private UnionMapWriter mapWriter; + private List writers = new java.util.ArrayList<>(); + private final NullableStructWriterFactory nullableStructWriterFactory; + + public UnionWriter(UnionVector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public UnionWriter(UnionVector vector, NullableStructWriterFactory nullableStructWriterFactory) { + data = vector; + this.nullableStructWriterFactory = nullableStructWriterFactory; + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + for (BaseWriter writer : writers) { + writer.setPosition(index); + } + } + + + @Override + public void start() { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().start(); + } + + @Override + public void end() { + getStructWriter().end(); + } + + @Override + public void startList() { + getListWriter().startList(); + data.setType(idx(), MinorType.LIST); + } + + @Override + public void endList() { + getListWriter().endList(); + } + + @Override + public void startMap() { + 
getMapWriter().startMap(); + data.setType(idx(), MinorType.MAP); + } + + @Override + public void endMap() { + getMapWriter().endMap(); + } + + @Override + public void startEntry() { + getMapWriter().startEntry(); + } + + @Override + public MapWriter key() { + return getMapWriter().key(); + } + + @Override + public MapWriter value() { + return getMapWriter().value(); + } + + @Override + public void endEntry() { + getMapWriter().endEntry(); + } + + private StructWriter getStructWriter() { + if (structWriter == null) { + structWriter = nullableStructWriterFactory.build(data.getStruct()); + structWriter.setPosition(idx()); + writers.add(structWriter); + } + return structWriter; + } + + public StructWriter asStruct() { + data.setType(idx(), MinorType.STRUCT); + return getStructWriter(); + } + + private ListWriter getListWriter() { + if (listWriter == null) { + listWriter = new UnionListWriter(data.getList(), nullableStructWriterFactory); + listWriter.setPosition(idx()); + writers.add(listWriter); + } + return listWriter; + } + + public ListWriter asList() { + data.setType(idx(), MinorType.LIST); + return getListWriter(); + } + + private MapWriter getMapWriter() { + if (mapWriter == null) { + mapWriter = new UnionMapWriter(data.getMap(new ArrowType.Map(false))); + mapWriter.setPosition(idx()); + writers.add(mapWriter); + } + return mapWriter; + } + + private MapWriter getMapWriter(ArrowType arrowType) { + if (mapWriter == null) { + mapWriter = new UnionMapWriter(data.getMap(arrowType)); + mapWriter.setPosition(idx()); + writers.add(mapWriter); + } + return mapWriter; + } + + public MapWriter asMap(ArrowType arrowType) { + data.setType(idx(), MinorType.MAP); + return getMapWriter(arrowType); + } + + BaseWriter getWriter(MinorType minorType) { + return getWriter(minorType, null); + } + + BaseWriter getWriter(MinorType minorType, ArrowType arrowType) { + switch (minorType) { + case STRUCT: + return getStructWriter(); + case LIST: + return getListWriter(); + case MAP: + 
return getMapWriter(arrowType); + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.typeParams?? || minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + case ${name?upper_case}: + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + return get${name}Writer(arrowType); + <#else> + return get${name}Writer(); + + + + + default: + throw new UnsupportedOperationException("Unknown type: " + minorType); + } + } + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) /> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + + private ${name}Writer ${name?uncap_first}Writer; + + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + private ${name}Writer get${name}Writer(ArrowType arrowType) { + if (${uncappedName}Writer == null) { + ${uncappedName}Writer = new ${name}WriterImpl(data.get${name}Vector(arrowType)); + ${uncappedName}Writer.setPosition(idx()); + writers.add(${uncappedName}Writer); + } + return ${uncappedName}Writer; + } + + public ${name}Writer as${name}(ArrowType arrowType) { + data.setType(idx(), MinorType.${name?upper_case}); + return get${name}Writer(arrowType); + } + <#else> + private ${name}Writer get${name}Writer() { + if (${uncappedName}Writer == null) { + ${uncappedName}Writer = new ${name}WriterImpl(data.get${name}Vector()); + ${uncappedName}Writer.setPosition(idx()); + writers.add(${uncappedName}Writer); + } + return ${uncappedName}Writer; + } + + public ${name}Writer as${name}() { + data.setType(idx(), MinorType.${name?upper_case}); + return get${name}Writer(); + } + + + @Override + public void write(${name}Holder holder) { + data.setType(idx(), MinorType.${name?upper_case}); + <#if minor.class?starts_with("Decimal")> + ArrowType arrowType = new ArrowType.Decimal(holder.precision, holder.scale, ${name}Holder.WIDTH * 8); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write${name}(<#list fields as field>holder.${field.name}<#if field_has_next>, , arrowType); + <#elseif is_timestamp_tz(minor.class)> + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.${name?upper_case?remove_ending("TZ")}.getType(); + ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write(holder); + 
<#elseif minor.class == "Duration"> + ArrowType arrowType = new ArrowType.Duration(holder.unit); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write(holder); + <#elseif minor.class == "FixedSizeBinary"> + ArrowType arrowType = new ArrowType.FixedSizeBinary(holder.byteWidth); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write(holder); + <#else> + get${name}Writer().setPosition(idx()); + get${name}Writer().write${name}(<#list fields as field>holder.${field.name}<#if field_has_next>, ); + + } + + public void write${minor.class}(<#list fields as field>${field.type} ${field.name}<#if field_has_next>, <#if minor.class?starts_with("Decimal")>, ArrowType arrowType) { + data.setType(idx(), MinorType.${name?upper_case}); + <#if minor.class?starts_with("Decimal")> + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write${name}(<#list fields as field>${field.name}<#if field_has_next>, , arrowType); + <#elseif is_timestamp_tz(minor.class)> + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.${name?upper_case?remove_ending("TZ")}.getType(); + ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC"); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); + <#elseif minor.class == "Duration" || minor.class == "FixedSizeBinary"> + // This is expected to throw. There's nothing more that we can do here since we can't infer any + // sort of default unit for the Duration or a default width for the FixedSizeBinary types. 
+ ArrowType arrowType = MinorType.${name?upper_case}.getType(); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); + <#else> + get${name}Writer().setPosition(idx()); + get${name}Writer().write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); + + } + <#if minor.class?starts_with("Decimal")> + public void write${name}(${friendlyType} value) { + data.setType(idx(), MinorType.${name?upper_case}); + ArrowType arrowType = new ArrowType.Decimal(value.precision(), value.scale(), ${name}Vector.TYPE_WIDTH * 8); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).write${name}(value); + } + + public void writeBigEndianBytesTo${name}(byte[] value, ArrowType arrowType) { + data.setType(idx(), MinorType.${name?upper_case}); + get${name}Writer(arrowType).setPosition(idx()); + get${name}Writer(arrowType).writeBigEndianBytesTo${name}(value, arrowType); + } + <#elseif minor.class?ends_with("VarBinary")> + @Override + public void write${minor.class}(byte[] value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } + + @Override + public void write${minor.class}(byte[] value, int offset, int length) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value, offset, length); + } + + @Override + public void write${minor.class}(ByteBuffer value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } + + @Override + public void write${minor.class}(ByteBuffer value, int offset, int length) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value, offset, length); + } + <#elseif minor.class?ends_with("VarChar")> + @Override + public void write${minor.class}(${friendlyType} value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } + + @Override + public 
void write${minor.class}(String value) { + get${name}Writer().setPosition(idx()); + get${name}Writer().write${minor.class}(value); + } + + + + + + public void writeNull() { + } + + @Override + public StructWriter struct() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().struct(); + } + + @Override + public ListWriter list() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().list(); + } + + @Override + public ListWriter list(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().list(name); + } + + @Override + public StructWriter struct(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().struct(name); + } + + @Override + public MapWriter map() { + data.setType(idx(), MinorType.MAP); + getListWriter().setPosition(idx()); + return getListWriter().map(); + } + + @Override + public MapWriter map(boolean keysSorted) { + data.setType(idx(), MinorType.MAP); + getListWriter().setPosition(idx()); + return getListWriter().map(keysSorted); + } + + @Override + public MapWriter map(String name) { + data.setType(idx(), MinorType.MAP); + getStructWriter().setPosition(idx()); + return getStructWriter().map(name); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + data.setType(idx(), MinorType.MAP); + getStructWriter().setPosition(idx()); + return getStructWriter().map(name, keysSorted); + } + + <#list vv.types as type><#list type.minor as minor> + <#assign lowerName = minor.class?uncap_first /> + <#if lowerName == "int" ><#assign lowerName = "integer" /> + <#assign upperName = minor.class?upper_case /> + <#assign capName = minor.class?cap_first /> + <#if !minor.typeParams?? 
|| minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + @Override + public ${capName}Writer ${lowerName}(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().${lowerName}(name); + } + + @Override + public ${capName}Writer ${lowerName}() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().${lowerName}(); + } + + <#if minor.class?starts_with("Decimal") || is_timestamp_tz(minor.class) || minor.class == "Duration" || minor.class == "FixedSizeBinary"> + @Override + public ${capName}Writer ${lowerName}(String name<#list minor.typeParams as typeParam>, ${typeParam.type} ${typeParam.name}) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().${lowerName}(name<#list minor.typeParams as typeParam>, ${typeParam.name}); + } + + + + @Override + public void allocate() { + data.allocateNew(); + } + + @Override + public void clear() { + data.clear(); + } + + @Override + public void close() throws Exception { + data.close(); + } + + @Override + public Field getField() { + return data.getField(); + } + + @Override + public int getValueCapacity() { + return data.getValueCapacity(); + } +} diff --git a/java/vector/target/codegen/templates/ValueHolders.java b/java/vector/target/codegen/templates/ValueHolders.java new file mode 100644 index 000000000000..973efd870a66 --- /dev/null +++ b/java/vector/target/codegen/templates/ValueHolders.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +<@pp.dropOutputFile /> +<#list vv.modes as mode> +<#list vv.types as type> +<#list type.minor as minor> + +<#assign className="${mode.prefix}${minor.class}Holder" /> +<@pp.changeOutputFile name="/org/apache/arrow/vector/holders/${className}.java" /> + +<#include "/@includes/license.ftl" /> + +package org.apache.arrow.vector.holders; + +<#include "/@includes/vv_imports.ftl" /> + +/** + * Source code generated using FreeMarker template ${.template_name} + */ +public final class ${className} implements ValueHolder{ + + <#if mode.name == "Repeated"> + + /** The first index (inclusive) into the Vector. **/ + public int start; + + /** The last index (exclusive) into the Vector. **/ + public int end; + + /** The Vector holding the actual values. **/ + public ${minor.class}Vector vector; + + <#else> + public static final int WIDTH = ${type.width}; + + <#if mode.name == "Optional">public int isSet; + <#else>public final int isSet = 1; + <#assign fields = (minor.fields!type.fields) + (minor.typeParams![]) /> + <#list fields as field> + public ${field.type} ${field.name}; + + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + + +} + + + + \ No newline at end of file diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/DenseUnionVector.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/DenseUnionVector.java new file mode 100644 index 000000000000..fb096c8266db --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/DenseUnionVector.java @@ -0,0 +1,1938 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import org.apache.arrow.memory.util.CommonUtil; +import org.apache.arrow.memory.util.hash.ArrowBufHasher; +import org.apache.arrow.memory.util.hash.SimpleHasher; +import org.apache.arrow.vector.compare.VectorVisitor; +import org.apache.arrow.vector.complex.impl.ComplexCopier; +import org.apache.arrow.vector.util.CallBack; +import org.apache.arrow.vector.ipc.message.ArrowFieldNode; +import 
org.apache.arrow.vector.BaseValueVector; +import org.apache.arrow.vector.util.OversizedAllocationException; +import org.apache.arrow.util.Preconditions; + +import static org.apache.arrow.vector.types.UnionMode.Dense; + + + +/* + * This class is generated using freemarker and the DenseUnionVector.java template. + */ +@SuppressWarnings("unused") + + +/** + * A vector which can hold values of different types. It does so by using a StructVector which contains a vector for each + * primitive type that is stored. StructVector is used in order to take advantage of its serialization/deserialization methods, + * as well as the addOrGet method. + * + * For performance reasons, DenseUnionVector stores a cached reference to each subtype vector, to avoid having to do the struct lookup + * each time the vector is accessed. + * Source code generated using FreeMarker template DenseUnionVector.java + */ +public class DenseUnionVector extends AbstractContainerVector implements FieldVector { + int valueCount; + + NonNullableStructVector internalStruct; + private ArrowBuf typeBuffer; + private ArrowBuf offsetBuffer; + + /** + * The key is type Id, and the value is vector. + */ + private ValueVector[] childVectors = new ValueVector[Byte.MAX_VALUE + 1]; + + /** + * The index is the type id, and the value is the type field. + */ + private Field[] typeFields = new Field[Byte.MAX_VALUE + 1]; + /** + * The index is the index into the typeFields array, and the value is the logical field id. + */ + private byte[] typeMapFields = new byte[Byte.MAX_VALUE + 1]; + + /** + * The next type id to allocate. 
+ */ + private byte nextTypeId = 0; + + private FieldReader reader; + + private long typeBufferAllocationSizeInBytes; + private long offsetBufferAllocationSizeInBytes; + + private final FieldType fieldType; + + public static final byte TYPE_WIDTH = 1; + public static final byte OFFSET_WIDTH = 4; + + private static final FieldType INTERNAL_STRUCT_TYPE = new FieldType(/*nullable*/ false, + ArrowType.Struct.INSTANCE, /*dictionary*/ null, /*metadata*/ null); + + public static DenseUnionVector empty(String name, BufferAllocator allocator) { + FieldType fieldType = FieldType.notNullable(new ArrowType.Union( + UnionMode.Dense, null)); + return new DenseUnionVector(name, allocator, fieldType, null); + } + + public DenseUnionVector(String name, BufferAllocator allocator, FieldType fieldType, CallBack callBack) { + super(name, allocator, callBack); + this.fieldType = fieldType; + this.internalStruct = new NonNullableStructVector( + "internal", + allocator, + INTERNAL_STRUCT_TYPE, + callBack, + AbstractStructVector.ConflictPolicy.CONFLICT_REPLACE, + false); + this.typeBuffer = allocator.getEmpty(); + this.typeBufferAllocationSizeInBytes = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH; + this.offsetBuffer = allocator.getEmpty(); + this.offsetBufferAllocationSizeInBytes = BaseValueVector.INITIAL_VALUE_ALLOCATION * OFFSET_WIDTH; + } + + public BufferAllocator getAllocator() { + return allocator; + } + + @Override + public MinorType getMinorType() { + return MinorType.DENSEUNION; + } + + @Override + public void initializeChildrenFromFields(List children) { + for (Field field : children) { + byte typeId = registerNewTypeId(field); + FieldVector vector = (FieldVector) internalStruct.add(field.getName(), field.getFieldType()); + vector.initializeChildrenFromFields(field.getChildren()); + childVectors[typeId] = vector; + } + } + + @Override + public List getChildrenFromFields() { + return internalStruct.getChildrenFromFields(); + } + + @Override + public void 
loadFieldBuffers(ArrowFieldNode fieldNode, List ownBuffers) { + if (ownBuffers.size() != 2) { + throw new IllegalArgumentException("Illegal buffer count for dense union with type " + getField().getFieldType() + + ", expected " + 2 + ", got: " + ownBuffers.size()); + } + + ArrowBuf buffer = ownBuffers.get(0); + typeBuffer.getReferenceManager().release(); + typeBuffer = buffer.getReferenceManager().retain(buffer, allocator); + typeBufferAllocationSizeInBytes = typeBuffer.capacity(); + + buffer = ownBuffers.get(1); + offsetBuffer.getReferenceManager().release(); + offsetBuffer = buffer.getReferenceManager().retain(buffer, allocator); + offsetBufferAllocationSizeInBytes = offsetBuffer.capacity(); + + this.valueCount = fieldNode.getLength(); + } + + @Override + public List getFieldBuffers() { + List result = new ArrayList<>(2); + setReaderAndWriterIndex(); + result.add(typeBuffer); + result.add(offsetBuffer); + + return result; + } + + private void setReaderAndWriterIndex() { + typeBuffer.readerIndex(0); + typeBuffer.writerIndex(valueCount * TYPE_WIDTH); + + offsetBuffer.readerIndex(0); + offsetBuffer.writerIndex((long) valueCount * OFFSET_WIDTH); + } + + /** + * Get the inner vectors. + * + * @deprecated This API will be removed as the current implementations no longer support inner vectors. + * + * @return the inner vectors for this field as defined by the TypeLayout + */ + @Override + @Deprecated + public List getFieldInnerVectors() { + throw new UnsupportedOperationException("There are no inner vectors. Use geFieldBuffers"); + } + + private String fieldName(byte typeId, MinorType type) { + return type.name().toLowerCase() + typeId; + } + + private FieldType fieldType(MinorType type) { + return FieldType.nullable(type.getType()); + } + + public synchronized byte registerNewTypeId(Field field) { + if (nextTypeId == typeFields.length) { + throw new IllegalStateException("Dense union vector support at most " + + typeFields.length + " relative types. 
Please use union of union instead"); + } + byte typeId = nextTypeId; + if (this.fieldType != null) { + int[] typeIds = ((ArrowType.Union) this.fieldType.getType()).getTypeIds(); + if (typeIds != null) { + int thisTypeId = typeIds[nextTypeId]; + if (thisTypeId > Byte.MAX_VALUE) { + throw new IllegalStateException("Dense union vector types must be bytes. " + thisTypeId + " is too large"); + } + typeId = (byte) thisTypeId; + } + } + typeFields[typeId] = field; + typeMapFields[nextTypeId] = typeId; + this.nextTypeId += 1; + return typeId; + } + + private T addOrGet(byte typeId, MinorType minorType, Class c) { + return internalStruct.addOrGet(fieldName(typeId, minorType), fieldType(minorType), c); + } + + private T addOrGet(byte typeId, MinorType minorType, ArrowType arrowType, Class c) { + return internalStruct.addOrGet(fieldName(typeId, minorType), FieldType.nullable(arrowType), c); + } + + @Override + public long getOffsetBufferAddress() { + return offsetBuffer.memoryAddress(); + } + + @Override + public long getDataBufferAddress() { + throw new UnsupportedOperationException(); + } + + @Override + public long getValidityBufferAddress() { + throw new UnsupportedOperationException(); + } + + @Override + public ArrowBuf getValidityBuffer() { throw new UnsupportedOperationException(); } + + @Override + public ArrowBuf getOffsetBuffer() { return offsetBuffer; } + + public ArrowBuf getTypeBuffer() { return typeBuffer; } + + @Override + public ArrowBuf getDataBuffer() { throw new UnsupportedOperationException(); } + + public StructVector getStruct(byte typeId) { + StructVector structVector = typeId < 0 ? 
null : (StructVector) childVectors[typeId]; + if (structVector == null) { + int vectorCount = internalStruct.size(); + structVector = addOrGet(typeId, MinorType.STRUCT, StructVector.class); + if (internalStruct.size() > vectorCount) { + structVector.allocateNew(); + childVectors[typeId] = structVector; + if (callBack != null) { + callBack.doWork(); + } + } + } + return structVector; + } + + + public TinyIntVector getTinyIntVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.TINYINT, TinyIntVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (TinyIntVector) vector; + } + + public UInt1Vector getUInt1Vector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.UINT1, UInt1Vector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (UInt1Vector) vector; + } + + public UInt2Vector getUInt2Vector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.UINT2, UInt2Vector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (UInt2Vector) vector; + } + + public SmallIntVector getSmallIntVector(byte typeId) { + ValueVector vector = typeId < 0 ? 
null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.SMALLINT, SmallIntVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (SmallIntVector) vector; + } + + public Float2Vector getFloat2Vector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.FLOAT2, Float2Vector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (Float2Vector) vector; + } + + public IntVector getIntVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.INT, IntVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (IntVector) vector; + } + + public UInt4Vector getUInt4Vector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.UINT4, UInt4Vector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (UInt4Vector) vector; + } + + public Float4Vector getFloat4Vector(byte typeId) { + ValueVector vector = typeId < 0 ? 
null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.FLOAT4, Float4Vector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (Float4Vector) vector; + } + + public DateDayVector getDateDayVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.DATEDAY, DateDayVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (DateDayVector) vector; + } + + public IntervalYearVector getIntervalYearVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.INTERVALYEAR, IntervalYearVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (IntervalYearVector) vector; + } + + public TimeSecVector getTimeSecVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.TIMESEC, TimeSecVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (TimeSecVector) vector; + } + + public TimeMilliVector getTimeMilliVector(byte typeId) { + ValueVector vector = typeId < 0 ? 
null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.TIMEMILLI, TimeMilliVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (TimeMilliVector) vector; + } + + public BigIntVector getBigIntVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.BIGINT, BigIntVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (BigIntVector) vector; + } + + public UInt8Vector getUInt8Vector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.UINT8, UInt8Vector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (UInt8Vector) vector; + } + + public Float8Vector getFloat8Vector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.FLOAT8, Float8Vector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (Float8Vector) vector; + } + + public DateMilliVector getDateMilliVector(byte typeId) { + ValueVector vector = typeId < 0 ? 
null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.DATEMILLI, DateMilliVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (DateMilliVector) vector; + } + + public TimeStampSecVector getTimeStampSecVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.TIMESTAMPSEC, TimeStampSecVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (TimeStampSecVector) vector; + } + + public TimeStampMilliVector getTimeStampMilliVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.TIMESTAMPMILLI, TimeStampMilliVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (TimeStampMilliVector) vector; + } + + public TimeStampMicroVector getTimeStampMicroVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.TIMESTAMPMICRO, TimeStampMicroVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (TimeStampMicroVector) vector; + } + + public TimeStampNanoVector getTimeStampNanoVector(byte typeId) { + ValueVector vector = typeId < 0 ? 
null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.TIMESTAMPNANO, TimeStampNanoVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (TimeStampNanoVector) vector; + } + + public TimeMicroVector getTimeMicroVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.TIMEMICRO, TimeMicroVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (TimeMicroVector) vector; + } + + public TimeNanoVector getTimeNanoVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.TIMENANO, TimeNanoVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (TimeNanoVector) vector; + } + + public IntervalDayVector getIntervalDayVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.INTERVALDAY, IntervalDayVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (IntervalDayVector) vector; + } + + public IntervalMonthDayNanoVector getIntervalMonthDayNanoVector(byte typeId) { + ValueVector vector = typeId < 0 ? 
null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.INTERVALMONTHDAYNANO, IntervalMonthDayNanoVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (IntervalMonthDayNanoVector) vector; + } + + public Decimal256Vector getDecimal256Vector(byte typeId, ArrowType arrowType) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.DECIMAL256, arrowType, Decimal256Vector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (Decimal256Vector) vector; + } + + public DecimalVector getDecimalVector(byte typeId, ArrowType arrowType) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.DECIMAL, arrowType, DecimalVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (DecimalVector) vector; + } + + public VarBinaryVector getVarBinaryVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.VARBINARY, VarBinaryVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (VarBinaryVector) vector; + } + + public VarCharVector getVarCharVector(byte typeId) { + ValueVector vector = typeId < 0 ? 
null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.VARCHAR, VarCharVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (VarCharVector) vector; + } + + public LargeVarCharVector getLargeVarCharVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.LARGEVARCHAR, LargeVarCharVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (LargeVarCharVector) vector; + } + + public LargeVarBinaryVector getLargeVarBinaryVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.LARGEVARBINARY, LargeVarBinaryVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (LargeVarBinaryVector) vector; + } + + public BitVector getBitVector(byte typeId) { + ValueVector vector = typeId < 0 ? null : childVectors[typeId]; + if (vector == null) { + int vectorCount = internalStruct.size(); + vector = addOrGet(typeId, MinorType.BIT, BitVector.class); + childVectors[typeId] = vector; + if (internalStruct.size() > vectorCount) { + vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return (BitVector) vector; + } + + public ListVector getList(byte typeId) { + ListVector listVector = typeId < 0 ? 
null : (ListVector) childVectors[typeId]; + if (listVector == null) { + int vectorCount = internalStruct.size(); + listVector = addOrGet(typeId, MinorType.LIST, ListVector.class); + if (internalStruct.size() > vectorCount) { + listVector.allocateNew(); + childVectors[typeId] = listVector; + if (callBack != null) { + callBack.doWork(); + } + } + } + return listVector; + } + + public MapVector getMap(byte typeId) { + MapVector mapVector = typeId < 0 ? null : (MapVector) childVectors[typeId]; + if (mapVector == null) { + int vectorCount = internalStruct.size(); + mapVector = addOrGet(typeId, MinorType.MAP, MapVector.class); + if (internalStruct.size() > vectorCount) { + mapVector.allocateNew(); + childVectors[typeId] = mapVector; + if (callBack != null) { + callBack.doWork(); + } + } + } + return mapVector; + } + + public byte getTypeId(int index) { + return typeBuffer.getByte(index * TYPE_WIDTH); + } + + public ValueVector getVectorByType(byte typeId) { + return typeId < 0 ? null : childVectors[typeId]; + } + + @Override + public void allocateNew() throws OutOfMemoryException { + /* new allocation -- clear the current buffers */ + clear(); + internalStruct.allocateNew(); + try { + allocateTypeBuffer(); + allocateOffsetBuffer(); + } catch (Exception e) { + clear(); + throw e; + } + } + + @Override + public boolean allocateNewSafe() { + /* new allocation -- clear the current buffers */ + clear(); + boolean safe = internalStruct.allocateNewSafe(); + if (!safe) { return false; } + try { + allocateTypeBuffer(); + allocateOffsetBuffer(); + } catch (Exception e) { + clear(); + return false; + } + + return true; + } + + private void allocateTypeBuffer() { + typeBuffer = allocator.buffer(typeBufferAllocationSizeInBytes); + typeBuffer.readerIndex(0); + setNegative(0, typeBuffer.capacity()); + } + + private void allocateOffsetBuffer() { + offsetBuffer = allocator.buffer(offsetBufferAllocationSizeInBytes); + offsetBuffer.readerIndex(0); + offsetBuffer.setZero(0, 
offsetBuffer.capacity()); + } + + + @Override + public void reAlloc() { + internalStruct.reAlloc(); + reallocTypeBuffer(); + reallocOffsetBuffer(); + } + + public int getOffset(int index) { + return offsetBuffer.getInt((long) index * OFFSET_WIDTH); + } + + private void reallocTypeBuffer() { + final long currentBufferCapacity = typeBuffer.capacity(); + long newAllocationSize = currentBufferCapacity * 2; + if (newAllocationSize == 0) { + if (typeBufferAllocationSizeInBytes > 0) { + newAllocationSize = typeBufferAllocationSizeInBytes; + } else { + newAllocationSize = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH * 2; + } + } + + newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize); + assert newAllocationSize >= 1; + + if (newAllocationSize > BaseValueVector.MAX_ALLOCATION_SIZE) { + throw new OversizedAllocationException("Unable to expand the buffer"); + } + + final ArrowBuf newBuf = allocator.buffer((int)newAllocationSize); + newBuf.setBytes(0, typeBuffer, 0, currentBufferCapacity); + typeBuffer.getReferenceManager().release(1); + typeBuffer = newBuf; + typeBufferAllocationSizeInBytes = (int)newAllocationSize; + setNegative(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity); + } + + private void reallocOffsetBuffer() { + final long currentBufferCapacity = offsetBuffer.capacity(); + long newAllocationSize = currentBufferCapacity * 2; + if (newAllocationSize == 0) { + if (offsetBufferAllocationSizeInBytes > 0) { + newAllocationSize = offsetBufferAllocationSizeInBytes; + } else { + newAllocationSize = BaseValueVector.INITIAL_VALUE_ALLOCATION * OFFSET_WIDTH * 2; + } + } + + newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize); + assert newAllocationSize >= 1; + + if (newAllocationSize > BaseValueVector.MAX_ALLOCATION_SIZE) { + throw new OversizedAllocationException("Unable to expand the buffer"); + } + + final ArrowBuf newBuf = allocator.buffer((int) newAllocationSize); + newBuf.setBytes(0, offsetBuffer, 0, 
currentBufferCapacity); + newBuf.setZero(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity); + offsetBuffer.getReferenceManager().release(1); + offsetBuffer = newBuf; + offsetBufferAllocationSizeInBytes = (int) newAllocationSize; + } + + @Override + public void setInitialCapacity(int numRecords) { } + + @Override + public int getValueCapacity() { + long capacity = getTypeBufferValueCapacity(); + long offsetCapacity = getOffsetBufferValueCapacity(); + if (offsetCapacity < capacity) { + capacity = offsetCapacity; + } + long structCapacity = internalStruct.getValueCapacity(); + if (structCapacity < capacity) { + structCapacity = capacity; + } + return (int) capacity; + } + + @Override + public void close() { + clear(); + } + + @Override + public void clear() { + valueCount = 0; + typeBuffer.getReferenceManager().release(); + typeBuffer = allocator.getEmpty(); + offsetBuffer.getReferenceManager().release(); + offsetBuffer = allocator.getEmpty(); + internalStruct.clear(); + } + + @Override + public void reset() { + valueCount = 0; + setNegative(0, typeBuffer.capacity()); + offsetBuffer.setZero(0, offsetBuffer.capacity()); + internalStruct.reset(); + } + + @Override + public Field getField() { + int childCount = (int) Arrays.stream(typeFields).filter(field -> field != null).count(); + List childFields = new ArrayList<>(childCount); + int[] typeIds = new int[childCount]; + for (int i = 0; i < typeFields.length; i++) { + if (typeFields[i] != null) { + int curIdx = childFields.size(); + typeIds[curIdx] = i; + childFields.add(typeFields[i]); + } + } + + FieldType fieldType; + if (this.fieldType == null) { + fieldType = FieldType.nullable(new ArrowType.Union(Dense, typeIds)); + } else { + final UnionMode mode = UnionMode.Dense; + fieldType = new FieldType(this.fieldType.isNullable(), new ArrowType.Union(mode, typeIds), + this.fieldType.getDictionary(), this.fieldType.getMetadata()); + } + + return new Field(name, fieldType, childFields); + } + + @Override + 
public TransferPair getTransferPair(BufferAllocator allocator) { + return getTransferPair(name, allocator); + } + + @Override + public TransferPair getTransferPair(String ref, BufferAllocator allocator) { + return getTransferPair(ref, allocator, null); + } + + @Override + public TransferPair getTransferPair(String ref, BufferAllocator allocator, CallBack callBack) { + return new org.apache.arrow.vector.complex.DenseUnionVector.TransferImpl(ref, allocator, callBack); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator) { + return getTransferPair(field, allocator, null); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator, CallBack callBack) { + return new org.apache.arrow.vector.complex.DenseUnionVector.TransferImpl(field, allocator, callBack); + } + + @Override + public TransferPair makeTransferPair(ValueVector target) { + return new TransferImpl((DenseUnionVector) target); + } + + @Override + public void copyFrom(int inIndex, int outIndex, ValueVector from) { + Preconditions.checkArgument(this.getMinorType() == from.getMinorType()); + DenseUnionVector fromCast = (DenseUnionVector) from; + int inOffset = fromCast.offsetBuffer.getInt((long) inIndex * OFFSET_WIDTH); + fromCast.getReader().setPosition(inOffset); + int outOffset = offsetBuffer.getInt((long) outIndex * OFFSET_WIDTH); + getWriter().setPosition(outOffset); + ComplexCopier.copy(fromCast.reader, writer); + } + + @Override + public void copyFromSafe(int inIndex, int outIndex, ValueVector from) { + copyFrom(inIndex, outIndex, from); + } + + public FieldVector addVector(byte typeId, FieldVector v) { + final String name = v.getName().isEmpty() ? 
fieldName(typeId, v.getMinorType()) : v.getName(); + Preconditions.checkState(internalStruct.getChild(name) == null, String.format("%s vector already exists", name)); + final FieldVector newVector = internalStruct.addOrGet(name, v.getField().getFieldType(), v.getClass()); + v.makeTransferPair(newVector).transfer(); + internalStruct.putChild(name, newVector); + childVectors[typeId] = newVector; + if (callBack != null) { + callBack.doWork(); + } + return newVector; + } + + private class TransferImpl implements TransferPair { + private final TransferPair[] internalTransferPairs = new TransferPair[nextTypeId]; + private final DenseUnionVector to; + + public TransferImpl(String name, BufferAllocator allocator, CallBack callBack) { + to = new DenseUnionVector(name, allocator, null, callBack); + internalStruct.makeTransferPair(to.internalStruct); + createTransferPairs(); + } + + public TransferImpl(Field field, BufferAllocator allocator, CallBack callBack) { + to = new DenseUnionVector(field.getName(), allocator, null, callBack); + internalStruct.makeTransferPair(to.internalStruct); + createTransferPairs(); + } + + public TransferImpl(DenseUnionVector to) { + this.to = to; + internalStruct.makeTransferPair(to.internalStruct); + createTransferPairs(); + } + + private void createTransferPairs() { + for (int i = 0; i < nextTypeId; i++) { + ValueVector srcVec = internalStruct.getVectorById(i); + ValueVector dstVec = to.internalStruct.getVectorById(i); + to.typeFields[i] = typeFields[i]; + to.typeMapFields[i] = typeMapFields[i]; + to.childVectors[i] = dstVec; + internalTransferPairs[i] = srcVec.makeTransferPair(dstVec); + } + } + + @Override + public void transfer() { + to.clear(); + + ReferenceManager refManager = typeBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(typeBuffer, to.allocator).getTransferredBuffer(); + + refManager = offsetBuffer.getReferenceManager(); + to.offsetBuffer = refManager.transferOwnership(offsetBuffer, 
to.allocator).getTransferredBuffer(); + + for (int i = 0; i < nextTypeId; i++) { + if (internalTransferPairs[i] != null) { + internalTransferPairs[i].transfer(); + to.childVectors[i] = internalTransferPairs[i].getTo(); + } + } + to.valueCount = valueCount; + clear(); + } + + @Override + public void splitAndTransfer(int startIndex, int length) { + to.clear(); + + // transfer type buffer + int startPoint = startIndex * TYPE_WIDTH; + int sliceLength = length * TYPE_WIDTH; + ArrowBuf slicedBuffer = typeBuffer.slice(startPoint, sliceLength); + ReferenceManager refManager = slicedBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(slicedBuffer, to.allocator).getTransferredBuffer(); + + // transfer offset buffer + while (to.offsetBuffer.capacity() < (long) length * OFFSET_WIDTH) { + to.reallocOffsetBuffer(); + } + + int [] typeCounts = new int[nextTypeId]; + int [] typeStarts = new int[nextTypeId]; + for (int i = 0; i < typeCounts.length; i++) { + typeCounts[i] = 0; + typeStarts[i] = -1; + } + + for (int i = startIndex; i < startIndex + length; i++) { + byte typeId = typeBuffer.getByte(i); + if (typeId >= 0) { + to.offsetBuffer.setInt((long) (i - startIndex) * OFFSET_WIDTH, typeCounts[typeId]); + typeCounts[typeId] += 1; + if (typeStarts[typeId] == -1) { + typeStarts[typeId] = offsetBuffer.getInt((long) i * OFFSET_WIDTH); + } + } + } + + // transfer vector values + for (int i = 0; i < nextTypeId; i++) { + if (typeCounts[i] > 0 && typeStarts[i] != -1) { + internalTransferPairs[i].splitAndTransfer(typeStarts[i], typeCounts[i]); + to.childVectors[i] = internalTransferPairs[i].getTo(); + } + } + + to.setValueCount(length); + } + + @Override + public ValueVector getTo() { + return to; + } + + @Override + public void copyValueSafe(int from, int to) { + this.to.copyFrom(from, to, DenseUnionVector.this); + } + } + + @Override + public FieldReader getReader() { + if (reader == null) { + reader = new DenseUnionReader(this); + } + return reader; + } + + 
public FieldWriter getWriter() { + if (writer == null) { + writer = new DenseUnionWriter(this); + } + return writer; + } + + @Override + public int getBufferSize() { + return this.getBufferSizeFor(this.valueCount); + } + + @Override + public int getBufferSizeFor(final int count) { + if (count == 0) { + return 0; + } + + int[] counts = new int[Byte.MAX_VALUE + 1]; + for (int i = 0; i < count; i++) { + byte typeId = getTypeId(i); + if (typeId != -1) { + counts[typeId] += 1; + } + } + + long childBytes = 0; + for (int typeId = 0; typeId < childVectors.length; typeId++) { + ValueVector childVector = childVectors[typeId]; + if (childVector != null) { + childBytes += childVector.getBufferSizeFor(counts[typeId]); + } + } + + return (int) (count * TYPE_WIDTH + (long) count * OFFSET_WIDTH + childBytes); + } + + @Override + public ArrowBuf[] getBuffers(boolean clear) { + List list = new java.util.ArrayList<>(); + setReaderAndWriterIndex(); + if (getBufferSize() != 0) { + list.add(typeBuffer); + list.add(offsetBuffer); + list.addAll(java.util.Arrays.asList(internalStruct.getBuffers(clear))); + } + if (clear) { + valueCount = 0; + typeBuffer.getReferenceManager().retain(); + typeBuffer.close(); + typeBuffer = allocator.getEmpty(); + offsetBuffer.getReferenceManager().retain(); + offsetBuffer.close(); + offsetBuffer = allocator.getEmpty(); + } + return list.toArray(new ArrowBuf[list.size()]); + } + + @Override + public Iterator iterator() { + return internalStruct.iterator(); + } + + private ValueVector getVector(int index) { + byte typeId = typeBuffer.getByte(index * TYPE_WIDTH); + return getVectorByType(typeId); + } + + public Object getObject(int index) { + ValueVector vector = getVector(index); + if (vector != null) { + int offset = offsetBuffer.getInt((long) index * OFFSET_WIDTH); + return vector.isNull(offset) ? 
null : vector.getObject(offset); + } + return null; + } + + public void get(int index, DenseUnionHolder holder) { + FieldReader reader = new DenseUnionReader(DenseUnionVector.this); + reader.setPosition(index); + holder.reader = reader; + } + + public int getValueCount() { + return valueCount; + } + + /** + * IMPORTANT: Union types always return non null as there is no validity buffer. + * + * To check validity correctly you must check the underlying vector. + */ + public boolean isNull(int index) { + return false; + } + + @Override + public int getNullCount() { + return 0; + } + + public int isSet(int index) { + return isNull(index) ? 0 : 1; + } + + DenseUnionWriter writer; + + public void setValueCount(int valueCount) { + this.valueCount = valueCount; + while (valueCount > getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + reallocOffsetBuffer(); + } + setChildVectorValueCounts(); + } + + private void setChildVectorValueCounts() { + int [] counts = new int[Byte.MAX_VALUE + 1]; + for (int i = 0; i < this.valueCount; i++) { + byte typeId = getTypeId(i); + if (typeId != -1) { + counts[typeId] += 1; + } + } + for (int i = 0; i < nextTypeId; i++) { + childVectors[typeMapFields[i]].setValueCount(counts[typeMapFields[i]]); + } + } + + public void setSafe(int index, DenseUnionHolder holder) { + FieldReader reader = holder.reader; + if (writer == null) { + writer = new DenseUnionWriter(DenseUnionVector.this); + } + int offset = offsetBuffer.getInt((long) index * OFFSET_WIDTH); + MinorType type = reader.getMinorType(); + writer.setPosition(offset); + byte typeId = holder.typeId; + switch (type) { + case TINYINT: + NullableTinyIntHolder tinyIntHolder = new NullableTinyIntHolder(); + reader.read(tinyIntHolder); + setSafe(index, tinyIntHolder); + break; + case UINT1: + NullableUInt1Holder uInt1Holder = new NullableUInt1Holder(); + reader.read(uInt1Holder); + setSafe(index, uInt1Holder); + break; + case UINT2: + NullableUInt2Holder uInt2Holder = new NullableUInt2Holder(); 
+ reader.read(uInt2Holder); + setSafe(index, uInt2Holder); + break; + case SMALLINT: + NullableSmallIntHolder smallIntHolder = new NullableSmallIntHolder(); + reader.read(smallIntHolder); + setSafe(index, smallIntHolder); + break; + case FLOAT2: + NullableFloat2Holder float2Holder = new NullableFloat2Holder(); + reader.read(float2Holder); + setSafe(index, float2Holder); + break; + case INT: + NullableIntHolder intHolder = new NullableIntHolder(); + reader.read(intHolder); + setSafe(index, intHolder); + break; + case UINT4: + NullableUInt4Holder uInt4Holder = new NullableUInt4Holder(); + reader.read(uInt4Holder); + setSafe(index, uInt4Holder); + break; + case FLOAT4: + NullableFloat4Holder float4Holder = new NullableFloat4Holder(); + reader.read(float4Holder); + setSafe(index, float4Holder); + break; + case DATEDAY: + NullableDateDayHolder dateDayHolder = new NullableDateDayHolder(); + reader.read(dateDayHolder); + setSafe(index, dateDayHolder); + break; + case INTERVALYEAR: + NullableIntervalYearHolder intervalYearHolder = new NullableIntervalYearHolder(); + reader.read(intervalYearHolder); + setSafe(index, intervalYearHolder); + break; + case TIMESEC: + NullableTimeSecHolder timeSecHolder = new NullableTimeSecHolder(); + reader.read(timeSecHolder); + setSafe(index, timeSecHolder); + break; + case TIMEMILLI: + NullableTimeMilliHolder timeMilliHolder = new NullableTimeMilliHolder(); + reader.read(timeMilliHolder); + setSafe(index, timeMilliHolder); + break; + case BIGINT: + NullableBigIntHolder bigIntHolder = new NullableBigIntHolder(); + reader.read(bigIntHolder); + setSafe(index, bigIntHolder); + break; + case UINT8: + NullableUInt8Holder uInt8Holder = new NullableUInt8Holder(); + reader.read(uInt8Holder); + setSafe(index, uInt8Holder); + break; + case FLOAT8: + NullableFloat8Holder float8Holder = new NullableFloat8Holder(); + reader.read(float8Holder); + setSafe(index, float8Holder); + break; + case DATEMILLI: + NullableDateMilliHolder dateMilliHolder = new 
NullableDateMilliHolder(); + reader.read(dateMilliHolder); + setSafe(index, dateMilliHolder); + break; + case TIMESTAMPSEC: + NullableTimeStampSecHolder timeStampSecHolder = new NullableTimeStampSecHolder(); + reader.read(timeStampSecHolder); + setSafe(index, timeStampSecHolder); + break; + case TIMESTAMPMILLI: + NullableTimeStampMilliHolder timeStampMilliHolder = new NullableTimeStampMilliHolder(); + reader.read(timeStampMilliHolder); + setSafe(index, timeStampMilliHolder); + break; + case TIMESTAMPMICRO: + NullableTimeStampMicroHolder timeStampMicroHolder = new NullableTimeStampMicroHolder(); + reader.read(timeStampMicroHolder); + setSafe(index, timeStampMicroHolder); + break; + case TIMESTAMPNANO: + NullableTimeStampNanoHolder timeStampNanoHolder = new NullableTimeStampNanoHolder(); + reader.read(timeStampNanoHolder); + setSafe(index, timeStampNanoHolder); + break; + case TIMEMICRO: + NullableTimeMicroHolder timeMicroHolder = new NullableTimeMicroHolder(); + reader.read(timeMicroHolder); + setSafe(index, timeMicroHolder); + break; + case TIMENANO: + NullableTimeNanoHolder timeNanoHolder = new NullableTimeNanoHolder(); + reader.read(timeNanoHolder); + setSafe(index, timeNanoHolder); + break; + case INTERVALDAY: + NullableIntervalDayHolder intervalDayHolder = new NullableIntervalDayHolder(); + reader.read(intervalDayHolder); + setSafe(index, intervalDayHolder); + break; + case INTERVALMONTHDAYNANO: + NullableIntervalMonthDayNanoHolder intervalMonthDayNanoHolder = new NullableIntervalMonthDayNanoHolder(); + reader.read(intervalMonthDayNanoHolder); + setSafe(index, intervalMonthDayNanoHolder); + break; + case DECIMAL256: + NullableDecimal256Holder decimal256Holder = new NullableDecimal256Holder(); + reader.read(decimal256Holder); + setSafe(index, decimal256Holder); + break; + case DECIMAL: + NullableDecimalHolder decimalHolder = new NullableDecimalHolder(); + reader.read(decimalHolder); + setSafe(index, decimalHolder); + break; + case VARBINARY: + 
NullableVarBinaryHolder varBinaryHolder = new NullableVarBinaryHolder(); + reader.read(varBinaryHolder); + setSafe(index, varBinaryHolder); + break; + case VARCHAR: + NullableVarCharHolder varCharHolder = new NullableVarCharHolder(); + reader.read(varCharHolder); + setSafe(index, varCharHolder); + break; + case LARGEVARCHAR: + NullableLargeVarCharHolder largeVarCharHolder = new NullableLargeVarCharHolder(); + reader.read(largeVarCharHolder); + setSafe(index, largeVarCharHolder); + break; + case LARGEVARBINARY: + NullableLargeVarBinaryHolder largeVarBinaryHolder = new NullableLargeVarBinaryHolder(); + reader.read(largeVarBinaryHolder); + setSafe(index, largeVarBinaryHolder); + break; + case BIT: + NullableBitHolder bitHolder = new NullableBitHolder(); + reader.read(bitHolder); + setSafe(index, bitHolder); + break; + case STRUCT: + case LIST: { + setTypeId(index, typeId); + ComplexCopier.copy(reader, writer); + break; + } + default: + throw new UnsupportedOperationException(); + } + } + public void setSafe(int index, NullableTinyIntHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + TinyIntVector vector = getTinyIntVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableUInt1Holder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + UInt1Vector vector = getUInt1Vector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableUInt2Holder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + UInt2Vector vector 
= getUInt2Vector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableSmallIntHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + SmallIntVector vector = getSmallIntVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableFloat2Holder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + Float2Vector vector = getFloat2Vector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableIntHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + IntVector vector = getIntVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableUInt4Holder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + UInt4Vector vector = getUInt4Vector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableFloat4Holder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + Float4Vector vector = 
getFloat4Vector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableDateDayHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + DateDayVector vector = getDateDayVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableIntervalYearHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + IntervalYearVector vector = getIntervalYearVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableTimeSecHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + TimeSecVector vector = getTimeSecVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableTimeMilliHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + TimeMilliVector vector = getTimeMilliVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableBigIntHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = 
getTypeId(index); + BigIntVector vector = getBigIntVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableUInt8Holder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + UInt8Vector vector = getUInt8Vector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableFloat8Holder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + Float8Vector vector = getFloat8Vector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableDateMilliHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + DateMilliVector vector = getDateMilliVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableTimeStampSecHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + TimeStampSecVector vector = getTimeStampSecVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableTimeStampMilliHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + 
reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + TimeStampMilliVector vector = getTimeStampMilliVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableTimeStampMicroHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + TimeStampMicroVector vector = getTimeStampMicroVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableTimeStampNanoHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + TimeStampNanoVector vector = getTimeStampNanoVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableTimeMicroHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + TimeMicroVector vector = getTimeMicroVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableTimeNanoHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + TimeNanoVector vector = getTimeNanoVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, 
NullableIntervalDayHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + IntervalDayVector vector = getIntervalDayVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableIntervalMonthDayNanoHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + IntervalMonthDayNanoVector vector = getIntervalMonthDayNanoVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableDecimal256Holder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + Decimal256Vector vector = getDecimal256Vector(typeId, new ArrowType.Decimal(holder.precision, holder.scale, holder.WIDTH * 8)); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableDecimalHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + DecimalVector vector = getDecimalVector(typeId, new ArrowType.Decimal(holder.precision, holder.scale, holder.WIDTH * 8)); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableVarBinaryHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + VarBinaryVector vector = 
getVarBinaryVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableVarCharHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + VarCharVector vector = getVarCharVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableLargeVarCharHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + LargeVarCharVector vector = getLargeVarCharVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableLargeVarBinaryHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + LargeVarBinaryVector vector = getLargeVarBinaryVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + public void setSafe(int index, NullableBitHolder holder) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + byte typeId = getTypeId(index); + BitVector vector = getBitVector(typeId); + int offset = vector.getValueCount(); + vector.setValueCount(offset + 1); + vector.setSafe(offset, holder); + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + + public void setTypeId(int index, byte typeId) { + while (index >= getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + } + typeBuffer.setByte(index * 
TYPE_WIDTH , typeId); + } + + private int getTypeBufferValueCapacity() { + return (int) typeBuffer.capacity() / TYPE_WIDTH; + } + + public void setOffset(int index, int offset) { + while (index >= getOffsetBufferValueCapacity()) { + reallocOffsetBuffer(); + } + + offsetBuffer.setInt((long) index * OFFSET_WIDTH, offset); + } + + private long getOffsetBufferValueCapacity() { + return offsetBuffer.capacity() / OFFSET_WIDTH; + } + + @Override + public int hashCode(int index, ArrowBufHasher hasher) { + if (isNull(index)) { + return 0; + } + int offset = offsetBuffer.getInt((long) index * OFFSET_WIDTH); + return getVector(index).hashCode(offset, hasher); + } + + @Override + public int hashCode(int index) { + return hashCode(index, SimpleHasher.INSTANCE); + } + + @Override + public OUT accept(VectorVisitor visitor, IN value) { + return visitor.visit(this, value); + } + + @Override + public String getName() { + return name; + } + + private void setNegative(long start, long end) { + for (long i = start;i < end; i++) { + typeBuffer.setByte(i, -1); + } + } + + @Override + public T addOrGet(String name, FieldType fieldType, Class clazz) { + return internalStruct.addOrGet(name, fieldType, clazz); + } + + @Override + public T getChild(String name, Class clazz) { + return internalStruct.getChild(name, clazz); + } + + @Override + public VectorWithOrdinal getChildVectorWithOrdinal(String name) { + return internalStruct.getChildVectorWithOrdinal(name); + } + + @Override + public int size() { + return internalStruct.size(); + } + + @Override + public void setInitialCapacity(int valueCount, double density) { + for (final ValueVector vector : internalStruct) { + if (vector instanceof DensityAwareVector) { + ((DensityAwareVector) vector).setInitialCapacity(valueCount, density); + } else { + vector.setInitialCapacity(valueCount); + } + } + } + + /** + * Set the element at the given index to null. 
For DenseUnionVector, it throws an UnsupportedOperationException + * as nulls are not supported at the top level and isNull() always returns false. + * + * @param index position of element + * @throws UnsupportedOperationException whenever invoked + */ + @Override + public void setNull(int index) { + throw new UnsupportedOperationException("The method setNull() is not supported on DenseUnionVector."); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/UnionVector.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/UnionVector.java new file mode 100644 index 000000000000..9ba9faadf328 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/UnionVector.java @@ -0,0 +1,2004 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import org.apache.arrow.memory.ArrowBuf; +import org.apache.arrow.memory.util.CommonUtil; +import org.apache.arrow.vector.compare.VectorVisitor; +import org.apache.arrow.vector.complex.impl.ComplexCopier; +import org.apache.arrow.vector.util.CallBack; +import org.apache.arrow.vector.util.ValueVectorUtility; +import org.apache.arrow.vector.ipc.message.ArrowFieldNode; +import 
org.apache.arrow.memory.util.ArrowBufPointer; +import org.apache.arrow.memory.util.hash.ArrowBufHasher; +import org.apache.arrow.vector.BaseValueVector; +import org.apache.arrow.vector.util.OversizedAllocationException; +import org.apache.arrow.util.Preconditions; + +import static org.apache.arrow.vector.types.UnionMode.Sparse; +import static org.apache.arrow.memory.util.LargeMemoryUtil.checkedCastToInt; +import static org.apache.arrow.memory.util.LargeMemoryUtil.capAtMaxInt; + + +/* + * This class is generated using freemarker and the UnionVector.java template. + */ +@SuppressWarnings("unused") + + +/** + * A vector which can hold values of different types. It does so by using a StructVector which contains a vector for each + * primitive type that is stored. StructVector is used in order to take advantage of its serialization/deserialization methods, + * as well as the addOrGet method. + * + * For performance reasons, UnionVector stores a cached reference to each subtype vector, to avoid having to do the struct lookup + * each time the vector is accessed. 
+ * Source code generated using FreeMarker template UnionVector.java + */ +public class UnionVector extends AbstractContainerVector implements FieldVector { + int valueCount; + + NonNullableStructVector internalStruct; + protected ArrowBuf typeBuffer; + + private StructVector structVector; + private ListVector listVector; + private MapVector mapVector; + + private FieldReader reader; + + private int singleType = 0; + private ValueVector singleVector; + + private int typeBufferAllocationSizeInBytes; + + private final FieldType fieldType; + private final Field[] typeIds = new Field[Byte.MAX_VALUE + 1]; + + public static final byte TYPE_WIDTH = 1; + private static final FieldType INTERNAL_STRUCT_TYPE = new FieldType(false /*nullable*/, + ArrowType.Struct.INSTANCE, null /*dictionary*/, null /*metadata*/); + + public static UnionVector empty(String name, BufferAllocator allocator) { + FieldType fieldType = FieldType.nullable(new ArrowType.Union( + UnionMode.Sparse, null)); + return new UnionVector(name, allocator, fieldType, null); + } + + public UnionVector(String name, BufferAllocator allocator, FieldType fieldType, CallBack callBack) { + super(name, allocator, callBack); + this.fieldType = fieldType; + this.internalStruct = new NonNullableStructVector( + "internal", + allocator, + INTERNAL_STRUCT_TYPE, + callBack, + AbstractStructVector.ConflictPolicy.CONFLICT_REPLACE, + false); + this.typeBuffer = allocator.getEmpty(); + this.typeBufferAllocationSizeInBytes = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH; + } + + public BufferAllocator getAllocator() { + return allocator; + } + + @Override + public MinorType getMinorType() { + return MinorType.UNION; + } + + @Override + public void initializeChildrenFromFields(List children) { + int count = 0; + for (Field child: children) { + int typeId = Types.getMinorTypeForArrowType(child.getType()).ordinal(); + if (this.fieldType != null) { + int[] typeIds = ((ArrowType.Union)this.fieldType.getType()).getTypeIds(); + if 
(typeIds != null) { + typeId = typeIds[count++]; + } + } + typeIds[typeId] = child; + } + internalStruct.initializeChildrenFromFields(children); + } + + @Override + public List getChildrenFromFields() { + return internalStruct.getChildrenFromFields(); + } + + @Override + public void loadFieldBuffers(ArrowFieldNode fieldNode, List ownBuffers) { + if (ownBuffers.size() != 1) { + throw new IllegalArgumentException("Illegal buffer count, expected 1, got: " + ownBuffers.size()); + } + ArrowBuf buffer = ownBuffers.get(0); + typeBuffer.getReferenceManager().release(); + typeBuffer = buffer.getReferenceManager().retain(buffer, allocator); + typeBufferAllocationSizeInBytes = checkedCastToInt(typeBuffer.capacity()); + this.valueCount = fieldNode.getLength(); + } + + @Override + public List getFieldBuffers() { + List result = new ArrayList<>(1); + setReaderAndWriterIndex(); + result.add(typeBuffer); + + return result; + } + + private void setReaderAndWriterIndex() { + typeBuffer.readerIndex(0); + typeBuffer.writerIndex(valueCount * TYPE_WIDTH); + } + + /** + * Get the inner vectors. + * + * @deprecated This API will be removed as the current implementations no longer support inner vectors. + * + * @return the inner vectors for this field as defined by the TypeLayout + */ + @Deprecated + @Override + public List getFieldInnerVectors() { + throw new UnsupportedOperationException("There are no inner vectors. Use geFieldBuffers"); + } + + private String fieldName(MinorType type) { + return type.name().toLowerCase(); + } + + private FieldType fieldType(MinorType type) { + return FieldType.nullable(type.getType()); + } + + private T addOrGet(Types.MinorType minorType, Class c) { + return addOrGet(null, minorType, c); + } + + private T addOrGet(String name, Types.MinorType minorType, ArrowType arrowType, Class c) { + return internalStruct.addOrGet(name == null ? 
fieldName(minorType) : name, FieldType.nullable(arrowType), c); + } + + private T addOrGet(String name, Types.MinorType minorType, Class c) { + return internalStruct.addOrGet(name == null ? fieldName(minorType) : name, fieldType(minorType), c); + } + + + @Override + public long getValidityBufferAddress() { + throw new UnsupportedOperationException(); + } + + public long getTypeBufferAddress() { + return typeBuffer.memoryAddress(); + } + + @Override + public long getDataBufferAddress() { + throw new UnsupportedOperationException(); + } + + @Override + public long getOffsetBufferAddress() { + throw new UnsupportedOperationException(); + } + + public ArrowBuf getTypeBuffer() { + return typeBuffer; + } + + @Override + public ArrowBuf getValidityBuffer() { throw new UnsupportedOperationException(); } + + @Override + public ArrowBuf getDataBuffer() { throw new UnsupportedOperationException(); } + + @Override + public ArrowBuf getOffsetBuffer() { throw new UnsupportedOperationException(); } + + public StructVector getStruct() { + if (structVector == null) { + int vectorCount = internalStruct.size(); + structVector = addOrGet(MinorType.STRUCT, StructVector.class); + if (internalStruct.size() > vectorCount) { + structVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return structVector; + } + + private TinyIntVector tinyIntVector; + + public TinyIntVector getTinyIntVector() { + return getTinyIntVector(null); + } + + public TinyIntVector getTinyIntVector(String name) { + if (tinyIntVector == null) { + int vectorCount = internalStruct.size(); + tinyIntVector = addOrGet(name, MinorType.TINYINT, TinyIntVector.class); + if (internalStruct.size() > vectorCount) { + tinyIntVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return tinyIntVector; + } + + private UInt1Vector uInt1Vector; + + public UInt1Vector getUInt1Vector() { + return getUInt1Vector(null); + } + + public UInt1Vector getUInt1Vector(String name) { + if 
(uInt1Vector == null) { + int vectorCount = internalStruct.size(); + uInt1Vector = addOrGet(name, MinorType.UINT1, UInt1Vector.class); + if (internalStruct.size() > vectorCount) { + uInt1Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return uInt1Vector; + } + + private UInt2Vector uInt2Vector; + + public UInt2Vector getUInt2Vector() { + return getUInt2Vector(null); + } + + public UInt2Vector getUInt2Vector(String name) { + if (uInt2Vector == null) { + int vectorCount = internalStruct.size(); + uInt2Vector = addOrGet(name, MinorType.UINT2, UInt2Vector.class); + if (internalStruct.size() > vectorCount) { + uInt2Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return uInt2Vector; + } + + private SmallIntVector smallIntVector; + + public SmallIntVector getSmallIntVector() { + return getSmallIntVector(null); + } + + public SmallIntVector getSmallIntVector(String name) { + if (smallIntVector == null) { + int vectorCount = internalStruct.size(); + smallIntVector = addOrGet(name, MinorType.SMALLINT, SmallIntVector.class); + if (internalStruct.size() > vectorCount) { + smallIntVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return smallIntVector; + } + + private Float2Vector float2Vector; + + public Float2Vector getFloat2Vector() { + return getFloat2Vector(null); + } + + public Float2Vector getFloat2Vector(String name) { + if (float2Vector == null) { + int vectorCount = internalStruct.size(); + float2Vector = addOrGet(name, MinorType.FLOAT2, Float2Vector.class); + if (internalStruct.size() > vectorCount) { + float2Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return float2Vector; + } + + private IntVector intVector; + + public IntVector getIntVector() { + return getIntVector(null); + } + + public IntVector getIntVector(String name) { + if (intVector == null) { + int vectorCount = internalStruct.size(); + intVector = addOrGet(name, 
MinorType.INT, IntVector.class); + if (internalStruct.size() > vectorCount) { + intVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return intVector; + } + + private UInt4Vector uInt4Vector; + + public UInt4Vector getUInt4Vector() { + return getUInt4Vector(null); + } + + public UInt4Vector getUInt4Vector(String name) { + if (uInt4Vector == null) { + int vectorCount = internalStruct.size(); + uInt4Vector = addOrGet(name, MinorType.UINT4, UInt4Vector.class); + if (internalStruct.size() > vectorCount) { + uInt4Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return uInt4Vector; + } + + private Float4Vector float4Vector; + + public Float4Vector getFloat4Vector() { + return getFloat4Vector(null); + } + + public Float4Vector getFloat4Vector(String name) { + if (float4Vector == null) { + int vectorCount = internalStruct.size(); + float4Vector = addOrGet(name, MinorType.FLOAT4, Float4Vector.class); + if (internalStruct.size() > vectorCount) { + float4Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return float4Vector; + } + + private DateDayVector dateDayVector; + + public DateDayVector getDateDayVector() { + return getDateDayVector(null); + } + + public DateDayVector getDateDayVector(String name) { + if (dateDayVector == null) { + int vectorCount = internalStruct.size(); + dateDayVector = addOrGet(name, MinorType.DATEDAY, DateDayVector.class); + if (internalStruct.size() > vectorCount) { + dateDayVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return dateDayVector; + } + + private IntervalYearVector intervalYearVector; + + public IntervalYearVector getIntervalYearVector() { + return getIntervalYearVector(null); + } + + public IntervalYearVector getIntervalYearVector(String name) { + if (intervalYearVector == null) { + int vectorCount = internalStruct.size(); + intervalYearVector = addOrGet(name, MinorType.INTERVALYEAR, 
IntervalYearVector.class); + if (internalStruct.size() > vectorCount) { + intervalYearVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return intervalYearVector; + } + + private TimeSecVector timeSecVector; + + public TimeSecVector getTimeSecVector() { + return getTimeSecVector(null); + } + + public TimeSecVector getTimeSecVector(String name) { + if (timeSecVector == null) { + int vectorCount = internalStruct.size(); + timeSecVector = addOrGet(name, MinorType.TIMESEC, TimeSecVector.class); + if (internalStruct.size() > vectorCount) { + timeSecVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeSecVector; + } + + private TimeMilliVector timeMilliVector; + + public TimeMilliVector getTimeMilliVector() { + return getTimeMilliVector(null); + } + + public TimeMilliVector getTimeMilliVector(String name) { + if (timeMilliVector == null) { + int vectorCount = internalStruct.size(); + timeMilliVector = addOrGet(name, MinorType.TIMEMILLI, TimeMilliVector.class); + if (internalStruct.size() > vectorCount) { + timeMilliVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeMilliVector; + } + + private BigIntVector bigIntVector; + + public BigIntVector getBigIntVector() { + return getBigIntVector(null); + } + + public BigIntVector getBigIntVector(String name) { + if (bigIntVector == null) { + int vectorCount = internalStruct.size(); + bigIntVector = addOrGet(name, MinorType.BIGINT, BigIntVector.class); + if (internalStruct.size() > vectorCount) { + bigIntVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return bigIntVector; + } + + private UInt8Vector uInt8Vector; + + public UInt8Vector getUInt8Vector() { + return getUInt8Vector(null); + } + + public UInt8Vector getUInt8Vector(String name) { + if (uInt8Vector == null) { + int vectorCount = internalStruct.size(); + uInt8Vector = addOrGet(name, MinorType.UINT8, UInt8Vector.class); + 
if (internalStruct.size() > vectorCount) { + uInt8Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return uInt8Vector; + } + + private Float8Vector float8Vector; + + public Float8Vector getFloat8Vector() { + return getFloat8Vector(null); + } + + public Float8Vector getFloat8Vector(String name) { + if (float8Vector == null) { + int vectorCount = internalStruct.size(); + float8Vector = addOrGet(name, MinorType.FLOAT8, Float8Vector.class); + if (internalStruct.size() > vectorCount) { + float8Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return float8Vector; + } + + private DateMilliVector dateMilliVector; + + public DateMilliVector getDateMilliVector() { + return getDateMilliVector(null); + } + + public DateMilliVector getDateMilliVector(String name) { + if (dateMilliVector == null) { + int vectorCount = internalStruct.size(); + dateMilliVector = addOrGet(name, MinorType.DATEMILLI, DateMilliVector.class); + if (internalStruct.size() > vectorCount) { + dateMilliVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return dateMilliVector; + } + + private DurationVector durationVector; + + public DurationVector getDurationVector() { + if (durationVector == null) { + throw new IllegalArgumentException("No Duration present. 
Provide ArrowType argument to create a new vector"); + } + return durationVector; + } + public DurationVector getDurationVector(ArrowType arrowType) { + return getDurationVector(null, arrowType); + } + public DurationVector getDurationVector(String name, ArrowType arrowType) { + if (durationVector == null) { + int vectorCount = internalStruct.size(); + durationVector = addOrGet(name, MinorType.DURATION, arrowType, DurationVector.class); + if (internalStruct.size() > vectorCount) { + durationVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return durationVector; + } + + private TimeStampSecVector timeStampSecVector; + + public TimeStampSecVector getTimeStampSecVector() { + return getTimeStampSecVector(null); + } + + public TimeStampSecVector getTimeStampSecVector(String name) { + if (timeStampSecVector == null) { + int vectorCount = internalStruct.size(); + timeStampSecVector = addOrGet(name, MinorType.TIMESTAMPSEC, TimeStampSecVector.class); + if (internalStruct.size() > vectorCount) { + timeStampSecVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeStampSecVector; + } + + private TimeStampMilliVector timeStampMilliVector; + + public TimeStampMilliVector getTimeStampMilliVector() { + return getTimeStampMilliVector(null); + } + + public TimeStampMilliVector getTimeStampMilliVector(String name) { + if (timeStampMilliVector == null) { + int vectorCount = internalStruct.size(); + timeStampMilliVector = addOrGet(name, MinorType.TIMESTAMPMILLI, TimeStampMilliVector.class); + if (internalStruct.size() > vectorCount) { + timeStampMilliVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeStampMilliVector; + } + + private TimeStampMicroVector timeStampMicroVector; + + public TimeStampMicroVector getTimeStampMicroVector() { + return getTimeStampMicroVector(null); + } + + public TimeStampMicroVector getTimeStampMicroVector(String name) { + if 
(timeStampMicroVector == null) { + int vectorCount = internalStruct.size(); + timeStampMicroVector = addOrGet(name, MinorType.TIMESTAMPMICRO, TimeStampMicroVector.class); + if (internalStruct.size() > vectorCount) { + timeStampMicroVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeStampMicroVector; + } + + private TimeStampNanoVector timeStampNanoVector; + + public TimeStampNanoVector getTimeStampNanoVector() { + return getTimeStampNanoVector(null); + } + + public TimeStampNanoVector getTimeStampNanoVector(String name) { + if (timeStampNanoVector == null) { + int vectorCount = internalStruct.size(); + timeStampNanoVector = addOrGet(name, MinorType.TIMESTAMPNANO, TimeStampNanoVector.class); + if (internalStruct.size() > vectorCount) { + timeStampNanoVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeStampNanoVector; + } + + private TimeStampSecTZVector timeStampSecTZVector; + + public TimeStampSecTZVector getTimeStampSecTZVector() { + if (timeStampSecTZVector == null) { + throw new IllegalArgumentException("No TimeStampSecTZ present. 
Provide ArrowType argument to create a new vector"); + } + return timeStampSecTZVector; + } + public TimeStampSecTZVector getTimeStampSecTZVector(ArrowType arrowType) { + return getTimeStampSecTZVector(null, arrowType); + } + public TimeStampSecTZVector getTimeStampSecTZVector(String name, ArrowType arrowType) { + if (timeStampSecTZVector == null) { + int vectorCount = internalStruct.size(); + timeStampSecTZVector = addOrGet(name, MinorType.TIMESTAMPSECTZ, arrowType, TimeStampSecTZVector.class); + if (internalStruct.size() > vectorCount) { + timeStampSecTZVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeStampSecTZVector; + } + + private TimeStampMilliTZVector timeStampMilliTZVector; + + public TimeStampMilliTZVector getTimeStampMilliTZVector() { + if (timeStampMilliTZVector == null) { + throw new IllegalArgumentException("No TimeStampMilliTZ present. Provide ArrowType argument to create a new vector"); + } + return timeStampMilliTZVector; + } + public TimeStampMilliTZVector getTimeStampMilliTZVector(ArrowType arrowType) { + return getTimeStampMilliTZVector(null, arrowType); + } + public TimeStampMilliTZVector getTimeStampMilliTZVector(String name, ArrowType arrowType) { + if (timeStampMilliTZVector == null) { + int vectorCount = internalStruct.size(); + timeStampMilliTZVector = addOrGet(name, MinorType.TIMESTAMPMILLITZ, arrowType, TimeStampMilliTZVector.class); + if (internalStruct.size() > vectorCount) { + timeStampMilliTZVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeStampMilliTZVector; + } + + private TimeStampMicroTZVector timeStampMicroTZVector; + + public TimeStampMicroTZVector getTimeStampMicroTZVector() { + if (timeStampMicroTZVector == null) { + throw new IllegalArgumentException("No TimeStampMicroTZ present. 
Provide ArrowType argument to create a new vector"); + } + return timeStampMicroTZVector; + } + public TimeStampMicroTZVector getTimeStampMicroTZVector(ArrowType arrowType) { + return getTimeStampMicroTZVector(null, arrowType); + } + public TimeStampMicroTZVector getTimeStampMicroTZVector(String name, ArrowType arrowType) { + if (timeStampMicroTZVector == null) { + int vectorCount = internalStruct.size(); + timeStampMicroTZVector = addOrGet(name, MinorType.TIMESTAMPMICROTZ, arrowType, TimeStampMicroTZVector.class); + if (internalStruct.size() > vectorCount) { + timeStampMicroTZVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeStampMicroTZVector; + } + + private TimeStampNanoTZVector timeStampNanoTZVector; + + public TimeStampNanoTZVector getTimeStampNanoTZVector() { + if (timeStampNanoTZVector == null) { + throw new IllegalArgumentException("No TimeStampNanoTZ present. Provide ArrowType argument to create a new vector"); + } + return timeStampNanoTZVector; + } + public TimeStampNanoTZVector getTimeStampNanoTZVector(ArrowType arrowType) { + return getTimeStampNanoTZVector(null, arrowType); + } + public TimeStampNanoTZVector getTimeStampNanoTZVector(String name, ArrowType arrowType) { + if (timeStampNanoTZVector == null) { + int vectorCount = internalStruct.size(); + timeStampNanoTZVector = addOrGet(name, MinorType.TIMESTAMPNANOTZ, arrowType, TimeStampNanoTZVector.class); + if (internalStruct.size() > vectorCount) { + timeStampNanoTZVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeStampNanoTZVector; + } + + private TimeMicroVector timeMicroVector; + + public TimeMicroVector getTimeMicroVector() { + return getTimeMicroVector(null); + } + + public TimeMicroVector getTimeMicroVector(String name) { + if (timeMicroVector == null) { + int vectorCount = internalStruct.size(); + timeMicroVector = addOrGet(name, MinorType.TIMEMICRO, TimeMicroVector.class); + if (internalStruct.size() > 
vectorCount) { + timeMicroVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeMicroVector; + } + + private TimeNanoVector timeNanoVector; + + public TimeNanoVector getTimeNanoVector() { + return getTimeNanoVector(null); + } + + public TimeNanoVector getTimeNanoVector(String name) { + if (timeNanoVector == null) { + int vectorCount = internalStruct.size(); + timeNanoVector = addOrGet(name, MinorType.TIMENANO, TimeNanoVector.class); + if (internalStruct.size() > vectorCount) { + timeNanoVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return timeNanoVector; + } + + private IntervalDayVector intervalDayVector; + + public IntervalDayVector getIntervalDayVector() { + return getIntervalDayVector(null); + } + + public IntervalDayVector getIntervalDayVector(String name) { + if (intervalDayVector == null) { + int vectorCount = internalStruct.size(); + intervalDayVector = addOrGet(name, MinorType.INTERVALDAY, IntervalDayVector.class); + if (internalStruct.size() > vectorCount) { + intervalDayVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return intervalDayVector; + } + + private IntervalMonthDayNanoVector intervalMonthDayNanoVector; + + public IntervalMonthDayNanoVector getIntervalMonthDayNanoVector() { + return getIntervalMonthDayNanoVector(null); + } + + public IntervalMonthDayNanoVector getIntervalMonthDayNanoVector(String name) { + if (intervalMonthDayNanoVector == null) { + int vectorCount = internalStruct.size(); + intervalMonthDayNanoVector = addOrGet(name, MinorType.INTERVALMONTHDAYNANO, IntervalMonthDayNanoVector.class); + if (internalStruct.size() > vectorCount) { + intervalMonthDayNanoVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return intervalMonthDayNanoVector; + } + + private Decimal256Vector decimal256Vector; + + public Decimal256Vector getDecimal256Vector() { + if (decimal256Vector == null) { + throw new 
IllegalArgumentException("No Decimal256 present. Provide ArrowType argument to create a new vector"); + } + return decimal256Vector; + } + public Decimal256Vector getDecimal256Vector(ArrowType arrowType) { + return getDecimal256Vector(null, arrowType); + } + public Decimal256Vector getDecimal256Vector(String name, ArrowType arrowType) { + if (decimal256Vector == null) { + int vectorCount = internalStruct.size(); + decimal256Vector = addOrGet(name, MinorType.DECIMAL256, arrowType, Decimal256Vector.class); + if (internalStruct.size() > vectorCount) { + decimal256Vector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return decimal256Vector; + } + + private DecimalVector decimalVector; + + public DecimalVector getDecimalVector() { + if (decimalVector == null) { + throw new IllegalArgumentException("No Decimal present. Provide ArrowType argument to create a new vector"); + } + return decimalVector; + } + public DecimalVector getDecimalVector(ArrowType arrowType) { + return getDecimalVector(null, arrowType); + } + public DecimalVector getDecimalVector(String name, ArrowType arrowType) { + if (decimalVector == null) { + int vectorCount = internalStruct.size(); + decimalVector = addOrGet(name, MinorType.DECIMAL, arrowType, DecimalVector.class); + if (internalStruct.size() > vectorCount) { + decimalVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return decimalVector; + } + + private FixedSizeBinaryVector fixedSizeBinaryVector; + + public FixedSizeBinaryVector getFixedSizeBinaryVector() { + if (fixedSizeBinaryVector == null) { + throw new IllegalArgumentException("No FixedSizeBinary present. 
Provide ArrowType argument to create a new vector"); + } + return fixedSizeBinaryVector; + } + public FixedSizeBinaryVector getFixedSizeBinaryVector(ArrowType arrowType) { + return getFixedSizeBinaryVector(null, arrowType); + } + public FixedSizeBinaryVector getFixedSizeBinaryVector(String name, ArrowType arrowType) { + if (fixedSizeBinaryVector == null) { + int vectorCount = internalStruct.size(); + fixedSizeBinaryVector = addOrGet(name, MinorType.FIXEDSIZEBINARY, arrowType, FixedSizeBinaryVector.class); + if (internalStruct.size() > vectorCount) { + fixedSizeBinaryVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return fixedSizeBinaryVector; + } + + private VarBinaryVector varBinaryVector; + + public VarBinaryVector getVarBinaryVector() { + return getVarBinaryVector(null); + } + + public VarBinaryVector getVarBinaryVector(String name) { + if (varBinaryVector == null) { + int vectorCount = internalStruct.size(); + varBinaryVector = addOrGet(name, MinorType.VARBINARY, VarBinaryVector.class); + if (internalStruct.size() > vectorCount) { + varBinaryVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return varBinaryVector; + } + + private VarCharVector varCharVector; + + public VarCharVector getVarCharVector() { + return getVarCharVector(null); + } + + public VarCharVector getVarCharVector(String name) { + if (varCharVector == null) { + int vectorCount = internalStruct.size(); + varCharVector = addOrGet(name, MinorType.VARCHAR, VarCharVector.class); + if (internalStruct.size() > vectorCount) { + varCharVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return varCharVector; + } + + private LargeVarCharVector largeVarCharVector; + + public LargeVarCharVector getLargeVarCharVector() { + return getLargeVarCharVector(null); + } + + public LargeVarCharVector getLargeVarCharVector(String name) { + if (largeVarCharVector == null) { + int vectorCount = internalStruct.size(); + 
largeVarCharVector = addOrGet(name, MinorType.LARGEVARCHAR, LargeVarCharVector.class); + if (internalStruct.size() > vectorCount) { + largeVarCharVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return largeVarCharVector; + } + + private LargeVarBinaryVector largeVarBinaryVector; + + public LargeVarBinaryVector getLargeVarBinaryVector() { + return getLargeVarBinaryVector(null); + } + + public LargeVarBinaryVector getLargeVarBinaryVector(String name) { + if (largeVarBinaryVector == null) { + int vectorCount = internalStruct.size(); + largeVarBinaryVector = addOrGet(name, MinorType.LARGEVARBINARY, LargeVarBinaryVector.class); + if (internalStruct.size() > vectorCount) { + largeVarBinaryVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return largeVarBinaryVector; + } + + private BitVector bitVector; + + public BitVector getBitVector() { + return getBitVector(null); + } + + public BitVector getBitVector(String name) { + if (bitVector == null) { + int vectorCount = internalStruct.size(); + bitVector = addOrGet(name, MinorType.BIT, BitVector.class); + if (internalStruct.size() > vectorCount) { + bitVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return bitVector; + } + + public ListVector getList() { + if (listVector == null) { + int vectorCount = internalStruct.size(); + listVector = addOrGet(MinorType.LIST, ListVector.class); + if (internalStruct.size() > vectorCount) { + listVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return listVector; + } + + public MapVector getMap() { + if (mapVector == null) { + throw new IllegalArgumentException("No map present. 
Provide ArrowType argument to create a new vector"); + } + return mapVector; + } + + public MapVector getMap(ArrowType arrowType) { + return getMap(null, arrowType); + } + + public MapVector getMap(String name, ArrowType arrowType) { + if (mapVector == null) { + int vectorCount = internalStruct.size(); + mapVector = addOrGet(name, MinorType.MAP, arrowType, MapVector.class); + if (internalStruct.size() > vectorCount) { + mapVector.allocateNew(); + if (callBack != null) { + callBack.doWork(); + } + } + } + return mapVector; + } + + public int getTypeValue(int index) { + return typeBuffer.getByte(index * TYPE_WIDTH); + } + + @Override + public void allocateNew() throws OutOfMemoryException { + /* new allocation -- clear the current buffers */ + clear(); + internalStruct.allocateNew(); + try { + allocateTypeBuffer(); + } catch (Exception e) { + clear(); + throw e; + } + } + + @Override + public boolean allocateNewSafe() { + /* new allocation -- clear the current buffers */ + clear(); + boolean safe = internalStruct.allocateNewSafe(); + if (!safe) { return false; } + try { + allocateTypeBuffer(); + } catch (Exception e) { + clear(); + return false; + } + + return true; + } + + private void allocateTypeBuffer() { + typeBuffer = allocator.buffer(typeBufferAllocationSizeInBytes); + typeBuffer.readerIndex(0); + typeBuffer.setZero(0, typeBuffer.capacity()); + } + + @Override + public void reAlloc() { + internalStruct.reAlloc(); + reallocTypeBuffer(); + } + + private void reallocTypeBuffer() { + final long currentBufferCapacity = typeBuffer.capacity(); + long newAllocationSize = currentBufferCapacity * 2; + if (newAllocationSize == 0) { + if (typeBufferAllocationSizeInBytes > 0) { + newAllocationSize = typeBufferAllocationSizeInBytes; + } else { + newAllocationSize = BaseValueVector.INITIAL_VALUE_ALLOCATION * TYPE_WIDTH * 2; + } + } + newAllocationSize = CommonUtil.nextPowerOfTwo(newAllocationSize); + assert newAllocationSize >= 1; + + if (newAllocationSize > 
BaseValueVector.MAX_ALLOCATION_SIZE) { + throw new OversizedAllocationException("Unable to expand the buffer"); + } + + final ArrowBuf newBuf = allocator.buffer(checkedCastToInt(newAllocationSize)); + newBuf.setBytes(0, typeBuffer, 0, currentBufferCapacity); + newBuf.setZero(currentBufferCapacity, newBuf.capacity() - currentBufferCapacity); + typeBuffer.getReferenceManager().release(1); + typeBuffer = newBuf; + typeBufferAllocationSizeInBytes = (int)newAllocationSize; + } + + @Override + public void setInitialCapacity(int numRecords) { } + + @Override + public int getValueCapacity() { + return Math.min(getTypeBufferValueCapacity(), internalStruct.getValueCapacity()); + } + + @Override + public void close() { + clear(); + } + + @Override + public void clear() { + valueCount = 0; + typeBuffer.getReferenceManager().release(); + typeBuffer = allocator.getEmpty(); + internalStruct.clear(); + } + + @Override + public void reset() { + valueCount = 0; + typeBuffer.setZero(0, typeBuffer.capacity()); + internalStruct.reset(); + } + + @Override + public Field getField() { + List childFields = new ArrayList<>(); + List children = internalStruct.getChildren(); + int[] typeIds = new int[children.size()]; + for (ValueVector v : children) { + typeIds[childFields.size()] = v.getMinorType().ordinal(); + childFields.add(v.getField()); + } + + FieldType fieldType; + if (this.fieldType == null) { + fieldType = FieldType.nullable(new ArrowType.Union(Sparse, typeIds)); + } else { + final UnionMode mode = ((ArrowType.Union)this.fieldType.getType()).getMode(); + fieldType = new FieldType(this.fieldType.isNullable(), new ArrowType.Union(mode, typeIds), + this.fieldType.getDictionary(), this.fieldType.getMetadata()); + } + + return new Field(name, fieldType, childFields); + } + + @Override + public TransferPair getTransferPair(BufferAllocator allocator) { + return getTransferPair(name, allocator); + } + + @Override + public TransferPair getTransferPair(String ref, BufferAllocator allocator) 
{ + return getTransferPair(ref, allocator, null); + } + + @Override + public TransferPair getTransferPair(String ref, BufferAllocator allocator, CallBack callBack) { + return new org.apache.arrow.vector.complex.UnionVector.TransferImpl(ref, allocator, callBack); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator) { + return getTransferPair(field, allocator, null); + } + + @Override + public TransferPair getTransferPair(Field field, BufferAllocator allocator, CallBack callBack) { + return new org.apache.arrow.vector.complex.UnionVector.TransferImpl(field, allocator, callBack); + } + + @Override + public TransferPair makeTransferPair(ValueVector target) { + return new TransferImpl((UnionVector) target); + } + + @Override + public void copyFrom(int inIndex, int outIndex, ValueVector from) { + Preconditions.checkArgument(this.getMinorType() == from.getMinorType()); + UnionVector fromCast = (UnionVector) from; + fromCast.getReader().setPosition(inIndex); + getWriter().setPosition(outIndex); + ComplexCopier.copy(fromCast.reader, writer); + } + + @Override + public void copyFromSafe(int inIndex, int outIndex, ValueVector from) { + copyFrom(inIndex, outIndex, from); + } + + public FieldVector addVector(FieldVector v) { + final String name = v.getName().isEmpty() ? fieldName(v.getMinorType()) : v.getName(); + Preconditions.checkState(internalStruct.getChild(name) == null, String.format("%s vector already exists", name)); + final FieldVector newVector = internalStruct.addOrGet(name, v.getField().getFieldType(), v.getClass()); + v.makeTransferPair(newVector).transfer(); + internalStruct.putChild(name, newVector); + if (callBack != null) { + callBack.doWork(); + } + return newVector; + } + + /** + * Directly put a vector to internalStruct without creating a new one with same type. 
+ */ + public void directAddVector(FieldVector v) { + String name = fieldName(v.getMinorType()); + Preconditions.checkState(internalStruct.getChild(name) == null, String.format("%s vector already exists", name)); + internalStruct.putChild(name, v); + if (callBack != null) { + callBack.doWork(); + } + } + + private class TransferImpl implements TransferPair { + private final TransferPair internalStructVectorTransferPair; + private final UnionVector to; + + public TransferImpl(String name, BufferAllocator allocator, CallBack callBack) { + to = new UnionVector(name, allocator, /* field type */ null, callBack); + internalStructVectorTransferPair = internalStruct.makeTransferPair(to.internalStruct); + } + + public TransferImpl(Field field, BufferAllocator allocator, CallBack callBack) { + to = new UnionVector(field.getName(), allocator, null, callBack); + internalStructVectorTransferPair = internalStruct.makeTransferPair(to.internalStruct); + } + + public TransferImpl(UnionVector to) { + this.to = to; + internalStructVectorTransferPair = internalStruct.makeTransferPair(to.internalStruct); + } + + @Override + public void transfer() { + to.clear(); + ReferenceManager refManager = typeBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(typeBuffer, to.allocator).getTransferredBuffer(); + internalStructVectorTransferPair.transfer(); + to.valueCount = valueCount; + clear(); + } + + @Override + public void splitAndTransfer(int startIndex, int length) { + Preconditions.checkArgument(startIndex >= 0 && length >= 0 && startIndex + length <= valueCount, + "Invalid parameters startIndex: %s, length: %s for valueCount: %s", startIndex, length, valueCount); + to.clear(); + + internalStructVectorTransferPair.splitAndTransfer(startIndex, length); + final int startPoint = startIndex * TYPE_WIDTH; + final int sliceLength = length * TYPE_WIDTH; + final ArrowBuf slicedBuffer = typeBuffer.slice(startPoint, sliceLength); + final ReferenceManager refManager = 
slicedBuffer.getReferenceManager(); + to.typeBuffer = refManager.transferOwnership(slicedBuffer, to.allocator).getTransferredBuffer(); + to.setValueCount(length); + } + + @Override + public ValueVector getTo() { + return to; + } + + @Override + public void copyValueSafe(int from, int to) { + this.to.copyFrom(from, to, UnionVector.this); + } + } + + @Override + public FieldReader getReader() { + if (reader == null) { + reader = new UnionReader(this); + } + return reader; + } + + public FieldWriter getWriter() { + if (writer == null) { + writer = new UnionWriter(this); + } + return writer; + } + + @Override + public int getBufferSize() { + if (valueCount == 0) { return 0; } + + return (valueCount * TYPE_WIDTH) + internalStruct.getBufferSize(); + } + + @Override + public int getBufferSizeFor(final int valueCount) { + if (valueCount == 0) { + return 0; + } + + long bufferSize = 0; + for (final ValueVector v : (Iterable) this) { + bufferSize += v.getBufferSizeFor(valueCount); + } + + return (int) bufferSize + (valueCount * TYPE_WIDTH); + } + + @Override + public ArrowBuf[] getBuffers(boolean clear) { + List list = new java.util.ArrayList<>(); + setReaderAndWriterIndex(); + if (getBufferSize() != 0) { + list.add(typeBuffer); + list.addAll(java.util.Arrays.asList(internalStruct.getBuffers(clear))); + } + if (clear) { + valueCount = 0; + typeBuffer.getReferenceManager().retain(); + typeBuffer.getReferenceManager().release(); + typeBuffer = allocator.getEmpty(); + } + return list.toArray(new ArrowBuf[list.size()]); + } + + @Override + public Iterator iterator() { + return internalStruct.iterator(); + } + + public ValueVector getVector(int index) { + return getVector(index, null); + } + + public ValueVector getVector(int index, ArrowType arrowType) { + int type = typeBuffer.getByte(index * TYPE_WIDTH); + return getVectorByType(type, arrowType); + } + + public ValueVector getVectorByType(int typeId) { + return getVectorByType(typeId, null); + } + + public ValueVector 
getVectorByType(int typeId, ArrowType arrowType) { + Field type = typeIds[typeId]; + Types.MinorType minorType; + String name = null; + if (type == null) { + minorType = Types.MinorType.values()[typeId]; + } else { + minorType = Types.getMinorTypeForArrowType(type.getType()); + name = type.getName(); + } + switch (minorType) { + case NULL: + return null; + case TINYINT: + return getTinyIntVector(name); + case UINT1: + return getUInt1Vector(name); + case UINT2: + return getUInt2Vector(name); + case SMALLINT: + return getSmallIntVector(name); + case FLOAT2: + return getFloat2Vector(name); + case INT: + return getIntVector(name); + case UINT4: + return getUInt4Vector(name); + case FLOAT4: + return getFloat4Vector(name); + case DATEDAY: + return getDateDayVector(name); + case INTERVALYEAR: + return getIntervalYearVector(name); + case TIMESEC: + return getTimeSecVector(name); + case TIMEMILLI: + return getTimeMilliVector(name); + case BIGINT: + return getBigIntVector(name); + case UINT8: + return getUInt8Vector(name); + case FLOAT8: + return getFloat8Vector(name); + case DATEMILLI: + return getDateMilliVector(name); + case DURATION: + return getDurationVector(name, arrowType); + case TIMESTAMPSEC: + return getTimeStampSecVector(name); + case TIMESTAMPMILLI: + return getTimeStampMilliVector(name); + case TIMESTAMPMICRO: + return getTimeStampMicroVector(name); + case TIMESTAMPNANO: + return getTimeStampNanoVector(name); + case TIMESTAMPSECTZ: + return getTimeStampSecTZVector(name, arrowType); + case TIMESTAMPMILLITZ: + return getTimeStampMilliTZVector(name, arrowType); + case TIMESTAMPMICROTZ: + return getTimeStampMicroTZVector(name, arrowType); + case TIMESTAMPNANOTZ: + return getTimeStampNanoTZVector(name, arrowType); + case TIMEMICRO: + return getTimeMicroVector(name); + case TIMENANO: + return getTimeNanoVector(name); + case INTERVALDAY: + return getIntervalDayVector(name); + case INTERVALMONTHDAYNANO: + return getIntervalMonthDayNanoVector(name); + case DECIMAL256: + 
return getDecimal256Vector(name, arrowType); + case DECIMAL: + return getDecimalVector(name, arrowType); + case FIXEDSIZEBINARY: + return getFixedSizeBinaryVector(name, arrowType); + case VARBINARY: + return getVarBinaryVector(name); + case VARCHAR: + return getVarCharVector(name); + case LARGEVARCHAR: + return getLargeVarCharVector(name); + case LARGEVARBINARY: + return getLargeVarBinaryVector(name); + case BIT: + return getBitVector(name); + case STRUCT: + return getStruct(); + case LIST: + return getList(); + case MAP: + return getMap(name, arrowType); + default: + throw new UnsupportedOperationException("Cannot support type: " + MinorType.values()[typeId]); + } + } + + public Object getObject(int index) { + ValueVector vector = getVector(index); + if (vector != null) { + return vector.isNull(index) ? null : vector.getObject(index); + } + return null; + } + + public byte[] get(int index) { + return null; + } + + public void get(int index, ComplexHolder holder) { + } + + public void get(int index, UnionHolder holder) { + FieldReader reader = new UnionReader(UnionVector.this); + reader.setPosition(index); + holder.reader = reader; + } + + public int getValueCount() { + return valueCount; + } + + /** + * IMPORTANT: Union types always return non null as there is no validity buffer. + * + * To check validity correctly you must check the underlying vector. + */ + public boolean isNull(int index) { + return false; + } + + @Override + public int getNullCount() { + return 0; + } + + public int isSet(int index) { + return isNull(index) ? 
0 : 1; + } + + UnionWriter writer; + + public void setValueCount(int valueCount) { + this.valueCount = valueCount; + while (valueCount > getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + } + internalStruct.setValueCount(valueCount); + } + + public void setSafe(int index, UnionHolder holder) { + setSafe(index, holder, null); + } + + public void setSafe(int index, UnionHolder holder, ArrowType arrowType) { + FieldReader reader = holder.reader; + if (writer == null) { + writer = new UnionWriter(UnionVector.this); + } + writer.setPosition(index); + MinorType type = reader.getMinorType(); + switch (type) { + case TINYINT: + NullableTinyIntHolder tinyIntHolder = new NullableTinyIntHolder(); + reader.read(tinyIntHolder); + setSafe(index, tinyIntHolder); + break; + case UINT1: + NullableUInt1Holder uInt1Holder = new NullableUInt1Holder(); + reader.read(uInt1Holder); + setSafe(index, uInt1Holder); + break; + case UINT2: + NullableUInt2Holder uInt2Holder = new NullableUInt2Holder(); + reader.read(uInt2Holder); + setSafe(index, uInt2Holder); + break; + case SMALLINT: + NullableSmallIntHolder smallIntHolder = new NullableSmallIntHolder(); + reader.read(smallIntHolder); + setSafe(index, smallIntHolder); + break; + case FLOAT2: + NullableFloat2Holder float2Holder = new NullableFloat2Holder(); + reader.read(float2Holder); + setSafe(index, float2Holder); + break; + case INT: + NullableIntHolder intHolder = new NullableIntHolder(); + reader.read(intHolder); + setSafe(index, intHolder); + break; + case UINT4: + NullableUInt4Holder uInt4Holder = new NullableUInt4Holder(); + reader.read(uInt4Holder); + setSafe(index, uInt4Holder); + break; + case FLOAT4: + NullableFloat4Holder float4Holder = new NullableFloat4Holder(); + reader.read(float4Holder); + setSafe(index, float4Holder); + break; + case DATEDAY: + NullableDateDayHolder dateDayHolder = new NullableDateDayHolder(); + reader.read(dateDayHolder); + setSafe(index, dateDayHolder); + break; + case INTERVALYEAR: + 
NullableIntervalYearHolder intervalYearHolder = new NullableIntervalYearHolder(); + reader.read(intervalYearHolder); + setSafe(index, intervalYearHolder); + break; + case TIMESEC: + NullableTimeSecHolder timeSecHolder = new NullableTimeSecHolder(); + reader.read(timeSecHolder); + setSafe(index, timeSecHolder); + break; + case TIMEMILLI: + NullableTimeMilliHolder timeMilliHolder = new NullableTimeMilliHolder(); + reader.read(timeMilliHolder); + setSafe(index, timeMilliHolder); + break; + case BIGINT: + NullableBigIntHolder bigIntHolder = new NullableBigIntHolder(); + reader.read(bigIntHolder); + setSafe(index, bigIntHolder); + break; + case UINT8: + NullableUInt8Holder uInt8Holder = new NullableUInt8Holder(); + reader.read(uInt8Holder); + setSafe(index, uInt8Holder); + break; + case FLOAT8: + NullableFloat8Holder float8Holder = new NullableFloat8Holder(); + reader.read(float8Holder); + setSafe(index, float8Holder); + break; + case DATEMILLI: + NullableDateMilliHolder dateMilliHolder = new NullableDateMilliHolder(); + reader.read(dateMilliHolder); + setSafe(index, dateMilliHolder); + break; + case DURATION: + NullableDurationHolder durationHolder = new NullableDurationHolder(); + reader.read(durationHolder); + setSafe(index, durationHolder, arrowType); + break; + case TIMESTAMPSEC: + NullableTimeStampSecHolder timeStampSecHolder = new NullableTimeStampSecHolder(); + reader.read(timeStampSecHolder); + setSafe(index, timeStampSecHolder); + break; + case TIMESTAMPMILLI: + NullableTimeStampMilliHolder timeStampMilliHolder = new NullableTimeStampMilliHolder(); + reader.read(timeStampMilliHolder); + setSafe(index, timeStampMilliHolder); + break; + case TIMESTAMPMICRO: + NullableTimeStampMicroHolder timeStampMicroHolder = new NullableTimeStampMicroHolder(); + reader.read(timeStampMicroHolder); + setSafe(index, timeStampMicroHolder); + break; + case TIMESTAMPNANO: + NullableTimeStampNanoHolder timeStampNanoHolder = new NullableTimeStampNanoHolder(); + 
reader.read(timeStampNanoHolder); + setSafe(index, timeStampNanoHolder); + break; + case TIMESTAMPSECTZ: + NullableTimeStampSecTZHolder timeStampSecTZHolder = new NullableTimeStampSecTZHolder(); + reader.read(timeStampSecTZHolder); + setSafe(index, timeStampSecTZHolder, arrowType); + break; + case TIMESTAMPMILLITZ: + NullableTimeStampMilliTZHolder timeStampMilliTZHolder = new NullableTimeStampMilliTZHolder(); + reader.read(timeStampMilliTZHolder); + setSafe(index, timeStampMilliTZHolder, arrowType); + break; + case TIMESTAMPMICROTZ: + NullableTimeStampMicroTZHolder timeStampMicroTZHolder = new NullableTimeStampMicroTZHolder(); + reader.read(timeStampMicroTZHolder); + setSafe(index, timeStampMicroTZHolder, arrowType); + break; + case TIMESTAMPNANOTZ: + NullableTimeStampNanoTZHolder timeStampNanoTZHolder = new NullableTimeStampNanoTZHolder(); + reader.read(timeStampNanoTZHolder); + setSafe(index, timeStampNanoTZHolder, arrowType); + break; + case TIMEMICRO: + NullableTimeMicroHolder timeMicroHolder = new NullableTimeMicroHolder(); + reader.read(timeMicroHolder); + setSafe(index, timeMicroHolder); + break; + case TIMENANO: + NullableTimeNanoHolder timeNanoHolder = new NullableTimeNanoHolder(); + reader.read(timeNanoHolder); + setSafe(index, timeNanoHolder); + break; + case INTERVALDAY: + NullableIntervalDayHolder intervalDayHolder = new NullableIntervalDayHolder(); + reader.read(intervalDayHolder); + setSafe(index, intervalDayHolder); + break; + case INTERVALMONTHDAYNANO: + NullableIntervalMonthDayNanoHolder intervalMonthDayNanoHolder = new NullableIntervalMonthDayNanoHolder(); + reader.read(intervalMonthDayNanoHolder); + setSafe(index, intervalMonthDayNanoHolder); + break; + case DECIMAL256: + NullableDecimal256Holder decimal256Holder = new NullableDecimal256Holder(); + reader.read(decimal256Holder); + setSafe(index, decimal256Holder, arrowType); + break; + case DECIMAL: + NullableDecimalHolder decimalHolder = new NullableDecimalHolder(); + 
reader.read(decimalHolder); + setSafe(index, decimalHolder, arrowType); + break; + case FIXEDSIZEBINARY: + NullableFixedSizeBinaryHolder fixedSizeBinaryHolder = new NullableFixedSizeBinaryHolder(); + reader.read(fixedSizeBinaryHolder); + setSafe(index, fixedSizeBinaryHolder, arrowType); + break; + case VARBINARY: + NullableVarBinaryHolder varBinaryHolder = new NullableVarBinaryHolder(); + reader.read(varBinaryHolder); + setSafe(index, varBinaryHolder); + break; + case VARCHAR: + NullableVarCharHolder varCharHolder = new NullableVarCharHolder(); + reader.read(varCharHolder); + setSafe(index, varCharHolder); + break; + case LARGEVARCHAR: + NullableLargeVarCharHolder largeVarCharHolder = new NullableLargeVarCharHolder(); + reader.read(largeVarCharHolder); + setSafe(index, largeVarCharHolder); + break; + case LARGEVARBINARY: + NullableLargeVarBinaryHolder largeVarBinaryHolder = new NullableLargeVarBinaryHolder(); + reader.read(largeVarBinaryHolder); + setSafe(index, largeVarBinaryHolder); + break; + case BIT: + NullableBitHolder bitHolder = new NullableBitHolder(); + reader.read(bitHolder); + setSafe(index, bitHolder); + break; + case STRUCT: { + ComplexCopier.copy(reader, writer); + break; + } + case LIST: { + ComplexCopier.copy(reader, writer); + break; + } + default: + throw new UnsupportedOperationException(); + } + } + + public void setSafe(int index, NullableTinyIntHolder holder) { + setType(index, MinorType.TINYINT); + getTinyIntVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableUInt1Holder holder) { + setType(index, MinorType.UINT1); + getUInt1Vector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableUInt2Holder holder) { + setType(index, MinorType.UINT2); + getUInt2Vector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableSmallIntHolder holder) { + setType(index, MinorType.SMALLINT); + getSmallIntVector(null).setSafe(index, holder); + } + public void setSafe(int index, 
NullableFloat2Holder holder) { + setType(index, MinorType.FLOAT2); + getFloat2Vector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableIntHolder holder) { + setType(index, MinorType.INT); + getIntVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableUInt4Holder holder) { + setType(index, MinorType.UINT4); + getUInt4Vector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableFloat4Holder holder) { + setType(index, MinorType.FLOAT4); + getFloat4Vector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableDateDayHolder holder) { + setType(index, MinorType.DATEDAY); + getDateDayVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableIntervalYearHolder holder) { + setType(index, MinorType.INTERVALYEAR); + getIntervalYearVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableTimeSecHolder holder) { + setType(index, MinorType.TIMESEC); + getTimeSecVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableTimeMilliHolder holder) { + setType(index, MinorType.TIMEMILLI); + getTimeMilliVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableBigIntHolder holder) { + setType(index, MinorType.BIGINT); + getBigIntVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableUInt8Holder holder) { + setType(index, MinorType.UINT8); + getUInt8Vector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableFloat8Holder holder) { + setType(index, MinorType.FLOAT8); + getFloat8Vector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableDateMilliHolder holder) { + setType(index, MinorType.DATEMILLI); + getDateMilliVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableDurationHolder holder, ArrowType arrowType) { + setType(index, MinorType.DURATION); + getDurationVector(null, arrowType).setSafe(index, 
holder); + } + public void setSafe(int index, NullableTimeStampSecHolder holder) { + setType(index, MinorType.TIMESTAMPSEC); + getTimeStampSecVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableTimeStampMilliHolder holder) { + setType(index, MinorType.TIMESTAMPMILLI); + getTimeStampMilliVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableTimeStampMicroHolder holder) { + setType(index, MinorType.TIMESTAMPMICRO); + getTimeStampMicroVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableTimeStampNanoHolder holder) { + setType(index, MinorType.TIMESTAMPNANO); + getTimeStampNanoVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableTimeStampSecTZHolder holder, ArrowType arrowType) { + setType(index, MinorType.TIMESTAMPSECTZ); + getTimeStampSecTZVector(null, arrowType).setSafe(index, holder); + } + public void setSafe(int index, NullableTimeStampMilliTZHolder holder, ArrowType arrowType) { + setType(index, MinorType.TIMESTAMPMILLITZ); + getTimeStampMilliTZVector(null, arrowType).setSafe(index, holder); + } + public void setSafe(int index, NullableTimeStampMicroTZHolder holder, ArrowType arrowType) { + setType(index, MinorType.TIMESTAMPMICROTZ); + getTimeStampMicroTZVector(null, arrowType).setSafe(index, holder); + } + public void setSafe(int index, NullableTimeStampNanoTZHolder holder, ArrowType arrowType) { + setType(index, MinorType.TIMESTAMPNANOTZ); + getTimeStampNanoTZVector(null, arrowType).setSafe(index, holder); + } + public void setSafe(int index, NullableTimeMicroHolder holder) { + setType(index, MinorType.TIMEMICRO); + getTimeMicroVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableTimeNanoHolder holder) { + setType(index, MinorType.TIMENANO); + getTimeNanoVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableIntervalDayHolder holder) { + setType(index, MinorType.INTERVALDAY); + 
getIntervalDayVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableIntervalMonthDayNanoHolder holder) { + setType(index, MinorType.INTERVALMONTHDAYNANO); + getIntervalMonthDayNanoVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableDecimal256Holder holder, ArrowType arrowType) { + setType(index, MinorType.DECIMAL256); + getDecimal256Vector(null, arrowType).setSafe(index, holder); + } + public void setSafe(int index, NullableDecimalHolder holder, ArrowType arrowType) { + setType(index, MinorType.DECIMAL); + getDecimalVector(null, arrowType).setSafe(index, holder); + } + public void setSafe(int index, NullableFixedSizeBinaryHolder holder, ArrowType arrowType) { + setType(index, MinorType.FIXEDSIZEBINARY); + getFixedSizeBinaryVector(null, arrowType).setSafe(index, holder); + } + public void setSafe(int index, NullableVarBinaryHolder holder) { + setType(index, MinorType.VARBINARY); + getVarBinaryVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableVarCharHolder holder) { + setType(index, MinorType.VARCHAR); + getVarCharVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableLargeVarCharHolder holder) { + setType(index, MinorType.LARGEVARCHAR); + getLargeVarCharVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableLargeVarBinaryHolder holder) { + setType(index, MinorType.LARGEVARBINARY); + getLargeVarBinaryVector(null).setSafe(index, holder); + } + public void setSafe(int index, NullableBitHolder holder) { + setType(index, MinorType.BIT); + getBitVector(null).setSafe(index, holder); + } + + public void setType(int index, MinorType type) { + while (index >= getTypeBufferValueCapacity()) { + reallocTypeBuffer(); + } + typeBuffer.setByte(index * TYPE_WIDTH , (byte) type.ordinal()); + } + + private int getTypeBufferValueCapacity() { + return capAtMaxInt(typeBuffer.capacity() / TYPE_WIDTH); + } + + @Override + public int hashCode(int 
index) { + return hashCode(index, null); + } + + @Override + public int hashCode(int index, ArrowBufHasher hasher) { + ValueVector vec = getVector(index); + if (vec == null) { + return ArrowBufPointer.NULL_HASH_CODE; + } + return vec.hashCode(index, hasher); + } + + @Override + public OUT accept(VectorVisitor visitor, IN value) { + return visitor.visit(this, value); + } + + @Override + public String getName() { + return name; + } + + @Override + public String toString() { + return ValueVectorUtility.getToString(this, 0, getValueCount()); + } + + @Override + public T addOrGet(String name, FieldType fieldType, Class clazz) { + return internalStruct.addOrGet(name, fieldType, clazz); + } + + @Override + public T getChild(String name, Class clazz) { + return internalStruct.getChild(name, clazz); + } + + @Override + public VectorWithOrdinal getChildVectorWithOrdinal(String name) { + return internalStruct.getChildVectorWithOrdinal(name); + } + + @Override + public int size() { + return internalStruct.size(); + } + + @Override + public void setInitialCapacity(int valueCount, double density) { + for (final ValueVector vector : internalStruct) { + if (vector instanceof DensityAwareVector) { + ((DensityAwareVector) vector).setInitialCapacity(valueCount, density); + } else { + vector.setInitialCapacity(valueCount); + } + } + } + + /** + * Set the element at the given index to null. For UnionVector, it throws an UnsupportedOperationException + * as nulls are not supported at the top level and isNull() always returns false. 
+ * + * @param index position of element + * @throws UnsupportedOperationException whenever invoked + */ + @Override + public void setNull(int index) { + throw new UnsupportedOperationException("The method setNull() is not supported on UnionVector."); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/AbstractFieldReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/AbstractFieldReader.java new file mode 100644 index 000000000000..824b3a777c95 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/AbstractFieldReader.java @@ -0,0 +1,1183 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */
package org.apache.arrow.vector.complex.impl;


import static org.apache.arrow.util.Preconditions.checkArgument;
import static org.apache.arrow.util.Preconditions.checkState;

import com.google.flatbuffers.FlatBufferBuilder;

import org.apache.arrow.memory.*;
import org.apache.arrow.util.Preconditions;
import org.apache.arrow.vector.types.Types;
import org.apache.arrow.vector.types.Types.*;
import org.apache.arrow.vector.types.pojo.*;
import org.apache.arrow.vector.types.pojo.ArrowType.*;
import org.apache.arrow.vector.types.*;
import org.apache.arrow.vector.*;
import org.apache.arrow.vector.holders.*;
import org.apache.arrow.vector.util.*;
import org.apache.arrow.vector.complex.*;
import org.apache.arrow.vector.complex.reader.*;
import org.apache.arrow.vector.complex.impl.*;
import org.apache.arrow.vector.complex.writer.*;
import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter;
import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter;
import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter;
import org.apache.arrow.vector.util.JsonStringArrayList;

import java.util.Arrays;
import java.util.Random;
import java.util.List;

import java.io.Closeable;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.Period;
import java.time.ZonedDateTime;



/**
 * Source code generated using FreeMarker template AbstractFieldReader.java
 *
 * <p>Fail-fast base class for field readers. Every typed accessor below calls
 * {@link #fail(String)}, which throws an {@link IllegalArgumentException}
 * naming both the attempted access and the concrete reader class — so reading
 * a value through the wrong typed accessor surfaces as a descriptive error
 * rather than a silent wrong result. Presumably each generated concrete
 * reader overrides only the accessors valid for its minor type (see the
 * FreeMarker template — TODO confirm against the generator).
 *
 * <p>NOTE(review): this is generated output ({@code target/generated-sources});
 * behavioral changes belong in the template, not in this file.
 */
@SuppressWarnings("unused")
abstract class AbstractFieldReader extends AbstractBaseReader implements FieldReader{

  AbstractFieldReader(){
    super();
  }

  /**
   * Returns true if the current value of the reader is not null
   * @return whether the current value is set
   */
  public boolean isSet() {
    return true;
  }

  // Unsupported here; fail() throws before the return is reached. The
  // unreachable "return null" pattern repeats through all stubs below to
  // satisfy the compiler.
  @Override
  public Field getField() {
    fail("getField");
    return null;
  }

  // ---- Boxed/object accessors -------------------------------------------
  // Each pair is (indexed read for repeated values, scalar read). All throw
  // unless overridden by a reader whose minor type supports the conversion.
  public Object readObject(int arrayIndex) { fail("readObject(int arrayIndex)"); return null; }
  public Object readObject() { fail("readObject()"); return null; }

  public BigDecimal readBigDecimal(int arrayIndex) { fail("readBigDecimal(int arrayIndex)"); return null; }
  public BigDecimal readBigDecimal() { fail("readBigDecimal()"); return null; }

  public Short readShort(int arrayIndex) { fail("readShort(int arrayIndex)"); return null; }
  public Short readShort() { fail("readShort()"); return null; }

  public Integer readInteger(int arrayIndex) { fail("readInteger(int arrayIndex)"); return null; }
  public Integer readInteger() { fail("readInteger()"); return null; }

  public Long readLong(int arrayIndex) { fail("readLong(int arrayIndex)"); return null; }
  public Long readLong() { fail("readLong()"); return null; }

  public Boolean readBoolean(int arrayIndex) { fail("readBoolean(int arrayIndex)"); return null; }
  public Boolean readBoolean() { fail("readBoolean()"); return null; }

  public LocalDateTime readLocalDateTime(int arrayIndex) { fail("readLocalDateTime(int arrayIndex)"); return null; }
  public LocalDateTime readLocalDateTime() { fail("readLocalDateTime()"); return null; }

  public Duration readDuration(int arrayIndex) { fail("readDuration(int arrayIndex)"); return null; }
  public Duration readDuration() { fail("readDuration()"); return null; }

  public Period readPeriod(int arrayIndex) { fail("readPeriod(int arrayIndex)"); return null; }
  public Period readPeriod() { fail("readPeriod()"); return null; }

  public Double readDouble(int arrayIndex) { fail("readDouble(int arrayIndex)"); return null; }
  public Double readDouble() { fail("readDouble()"); return null; }

  public Float readFloat(int arrayIndex) { fail("readFloat(int arrayIndex)"); return null; }
  public Float readFloat() { fail("readFloat()"); return null; }

  public Character readCharacter(int arrayIndex) { fail("readCharacter(int arrayIndex)"); return null; }
  public Character readCharacter() { fail("readCharacter()"); return null; }

  public Text readText(int arrayIndex) { fail("readText(int arrayIndex)"); return null; }
  public Text readText() { fail("readText()"); return null; }

  public String readString(int arrayIndex) { fail("readString(int arrayIndex)"); return null; }
  public String readString() { fail("readString()"); return null; }

  public Byte readByte(int arrayIndex) { fail("readByte(int arrayIndex)"); return null; }
  public Byte readByte() { fail("readByte()"); return null; }

  public byte[] readByteArray(int arrayIndex) { fail("readByteArray(int arrayIndex)"); return null; }
  public byte[] readByteArray() { fail("readByteArray()"); return null; }

  public PeriodDuration readPeriodDuration(int arrayIndex) { fail("readPeriodDuration(int arrayIndex)"); return null; }
  public PeriodDuration readPeriodDuration() { fail("readPeriodDuration()"); return null; }

  // ---- Complex-writer copy stubs ----------------------------------------
  public void copyAsValue(StructWriter writer) { fail("CopyAsValue StructWriter"); }
  public void copyAsField(String name, StructWriter writer) { fail("CopyAsField StructWriter"); }
  public void copyAsField(String name, ListWriter writer) { fail("CopyAsFieldList"); }
  public void copyAsField(String name, MapWriter writer) { fail("CopyAsFieldMap"); }

  // ---- Per-minor-type holder read / copy stubs --------------------------
  // One group per Arrow minor type. Each group has: read into required and
  // nullable holders, indexed ("Repeated") variants of both, and copyAsValue/
  // copyAsField against the matching typed writer. The fail() argument encodes
  // which variant was attempted.

  // ---- TinyInt ----
  public void read(TinyIntHolder holder) { fail("TinyInt"); }
  public void read(NullableTinyIntHolder holder) { fail("TinyInt"); }
  public void read(int arrayIndex, TinyIntHolder holder) { fail("RepeatedTinyInt"); }
  public void read(int arrayIndex, NullableTinyIntHolder holder) { fail("RepeatedTinyInt"); }
  public void copyAsValue(TinyIntWriter writer) { fail("CopyAsValueTinyInt"); }
  public void copyAsField(String name, TinyIntWriter writer) { fail("CopyAsFieldTinyInt"); }

  // ---- UInt1 ----
  public void read(UInt1Holder holder) { fail("UInt1"); }
  public void read(NullableUInt1Holder holder) { fail("UInt1"); }
  public void read(int arrayIndex, UInt1Holder holder) { fail("RepeatedUInt1"); }
  public void read(int arrayIndex, NullableUInt1Holder holder) { fail("RepeatedUInt1"); }
  public void copyAsValue(UInt1Writer writer) { fail("CopyAsValueUInt1"); }
  public void copyAsField(String name, UInt1Writer writer) { fail("CopyAsFieldUInt1"); }

  // ---- UInt2 ----
  public void read(UInt2Holder holder) { fail("UInt2"); }
  public void read(NullableUInt2Holder holder) { fail("UInt2"); }
  public void read(int arrayIndex, UInt2Holder holder) { fail("RepeatedUInt2"); }
  public void read(int arrayIndex, NullableUInt2Holder holder) { fail("RepeatedUInt2"); }
  public void copyAsValue(UInt2Writer writer) { fail("CopyAsValueUInt2"); }
  public void copyAsField(String name, UInt2Writer writer) { fail("CopyAsFieldUInt2"); }

  // ---- SmallInt ----
  public void read(SmallIntHolder holder) { fail("SmallInt"); }
  public void read(NullableSmallIntHolder holder) { fail("SmallInt"); }
  public void read(int arrayIndex, SmallIntHolder holder) { fail("RepeatedSmallInt"); }
  public void read(int arrayIndex, NullableSmallIntHolder holder) { fail("RepeatedSmallInt"); }
  public void copyAsValue(SmallIntWriter writer) { fail("CopyAsValueSmallInt"); }
  public void copyAsField(String name, SmallIntWriter writer) { fail("CopyAsFieldSmallInt"); }

  // ---- Float2 ----
  public void read(Float2Holder holder) { fail("Float2"); }
  public void read(NullableFloat2Holder holder) { fail("Float2"); }
  public void read(int arrayIndex, Float2Holder holder) { fail("RepeatedFloat2"); }
  public void read(int arrayIndex, NullableFloat2Holder holder) { fail("RepeatedFloat2"); }
  public void copyAsValue(Float2Writer writer) { fail("CopyAsValueFloat2"); }
  public void copyAsField(String name, Float2Writer writer) { fail("CopyAsFieldFloat2"); }

  // ---- Int ----
  public void read(IntHolder holder) { fail("Int"); }
  public void read(NullableIntHolder holder) { fail("Int"); }
  public void read(int arrayIndex, IntHolder holder) { fail("RepeatedInt"); }
  public void read(int arrayIndex, NullableIntHolder holder) { fail("RepeatedInt"); }
  public void copyAsValue(IntWriter writer) { fail("CopyAsValueInt"); }
  public void copyAsField(String name, IntWriter writer) { fail("CopyAsFieldInt"); }

  // ---- UInt4 ----
  public void read(UInt4Holder holder) { fail("UInt4"); }
  public void read(NullableUInt4Holder holder) { fail("UInt4"); }
  public void read(int arrayIndex, UInt4Holder holder) { fail("RepeatedUInt4"); }
  public void read(int arrayIndex, NullableUInt4Holder holder) { fail("RepeatedUInt4"); }
  public void copyAsValue(UInt4Writer writer) { fail("CopyAsValueUInt4"); }
  public void copyAsField(String name, UInt4Writer writer) { fail("CopyAsFieldUInt4"); }

  // ---- Float4 ----
  public void read(Float4Holder holder) { fail("Float4"); }
  public void read(NullableFloat4Holder holder) { fail("Float4"); }
  public void read(int arrayIndex, Float4Holder holder) { fail("RepeatedFloat4"); }
  public void read(int arrayIndex, NullableFloat4Holder holder) { fail("RepeatedFloat4"); }
  public void copyAsValue(Float4Writer writer) { fail("CopyAsValueFloat4"); }
  public void copyAsField(String name, Float4Writer writer) { fail("CopyAsFieldFloat4"); }

  // ---- DateDay ----
  public void read(DateDayHolder holder) { fail("DateDay"); }
  public void read(NullableDateDayHolder holder) { fail("DateDay"); }
  public void read(int arrayIndex, DateDayHolder holder) { fail("RepeatedDateDay"); }
  public void read(int arrayIndex, NullableDateDayHolder holder) { fail("RepeatedDateDay"); }
  public void copyAsValue(DateDayWriter writer) { fail("CopyAsValueDateDay"); }
  public void copyAsField(String name, DateDayWriter writer) { fail("CopyAsFieldDateDay"); }

  // ---- IntervalYear ----
  public void read(IntervalYearHolder holder) { fail("IntervalYear"); }
  public void read(NullableIntervalYearHolder holder) { fail("IntervalYear"); }
  public void read(int arrayIndex, IntervalYearHolder holder) { fail("RepeatedIntervalYear"); }
  public void read(int arrayIndex, NullableIntervalYearHolder holder) { fail("RepeatedIntervalYear"); }
  public void copyAsValue(IntervalYearWriter writer) { fail("CopyAsValueIntervalYear"); }
  public void copyAsField(String name, IntervalYearWriter writer) { fail("CopyAsFieldIntervalYear"); }

  // ---- TimeSec ----
  public void read(TimeSecHolder holder) { fail("TimeSec"); }
  public void read(NullableTimeSecHolder holder) { fail("TimeSec"); }
  public void read(int arrayIndex, TimeSecHolder holder) { fail("RepeatedTimeSec"); }
  public void read(int arrayIndex, NullableTimeSecHolder holder) { fail("RepeatedTimeSec"); }
  public void copyAsValue(TimeSecWriter writer) { fail("CopyAsValueTimeSec"); }
  public void copyAsField(String name, TimeSecWriter writer) { fail("CopyAsFieldTimeSec"); }

  // ---- TimeMilli ----
  public void read(TimeMilliHolder holder) { fail("TimeMilli"); }
  public void read(NullableTimeMilliHolder holder) { fail("TimeMilli"); }
  public void read(int arrayIndex, TimeMilliHolder holder) { fail("RepeatedTimeMilli"); }
  public void read(int arrayIndex, NullableTimeMilliHolder holder) { fail("RepeatedTimeMilli"); }
  public void copyAsValue(TimeMilliWriter writer) { fail("CopyAsValueTimeMilli"); }
  public void copyAsField(String name, TimeMilliWriter writer) { fail("CopyAsFieldTimeMilli"); }

  // ---- BigInt ----
  public void read(BigIntHolder holder) { fail("BigInt"); }
  public void read(NullableBigIntHolder holder) { fail("BigInt"); }
  public void read(int arrayIndex, BigIntHolder holder) { fail("RepeatedBigInt"); }
  public void read(int arrayIndex, NullableBigIntHolder holder) { fail("RepeatedBigInt"); }
  public void copyAsValue(BigIntWriter writer) { fail("CopyAsValueBigInt"); }
  public void copyAsField(String name, BigIntWriter writer) { fail("CopyAsFieldBigInt"); }

  // ---- UInt8 ----
  public void read(UInt8Holder holder) { fail("UInt8"); }
  public void read(NullableUInt8Holder holder) { fail("UInt8"); }
  public void read(int arrayIndex, UInt8Holder holder) { fail("RepeatedUInt8"); }
  public void read(int arrayIndex, NullableUInt8Holder holder) { fail("RepeatedUInt8"); }
  public void copyAsValue(UInt8Writer writer) { fail("CopyAsValueUInt8"); }
  public void copyAsField(String name, UInt8Writer writer) { fail("CopyAsFieldUInt8"); }

  // ---- Float8 ----
  public void read(Float8Holder holder) { fail("Float8"); }
  public void read(NullableFloat8Holder holder) { fail("Float8"); }
  public void read(int arrayIndex, Float8Holder holder) { fail("RepeatedFloat8"); }
  public void read(int arrayIndex, NullableFloat8Holder holder) { fail("RepeatedFloat8"); }
  public void copyAsValue(Float8Writer writer) { fail("CopyAsValueFloat8"); }
  public void copyAsField(String name, Float8Writer writer) { fail("CopyAsFieldFloat8"); }

  // ---- DateMilli ----
  public void read(DateMilliHolder holder) { fail("DateMilli"); }
  public void read(NullableDateMilliHolder holder) { fail("DateMilli"); }
  public void read(int arrayIndex, DateMilliHolder holder) { fail("RepeatedDateMilli"); }
  public void read(int arrayIndex, NullableDateMilliHolder holder) { fail("RepeatedDateMilli"); }
  public void copyAsValue(DateMilliWriter writer) { fail("CopyAsValueDateMilli"); }
  public void copyAsField(String name, DateMilliWriter writer) { fail("CopyAsFieldDateMilli"); }

  // ---- Duration ----
  public void read(DurationHolder holder) { fail("Duration"); }
  public void read(NullableDurationHolder holder) { fail("Duration"); }
  public void read(int arrayIndex, DurationHolder holder) { fail("RepeatedDuration"); }
  public void read(int arrayIndex, NullableDurationHolder holder) { fail("RepeatedDuration"); }
  public void copyAsValue(DurationWriter writer) { fail("CopyAsValueDuration"); }
  public void copyAsField(String name, DurationWriter writer) { fail("CopyAsFieldDuration"); }

  // ---- TimeStampSec ----
  public void read(TimeStampSecHolder holder) { fail("TimeStampSec"); }
  public void read(NullableTimeStampSecHolder holder) { fail("TimeStampSec"); }
  public void read(int arrayIndex, TimeStampSecHolder holder) { fail("RepeatedTimeStampSec"); }
  public void read(int arrayIndex, NullableTimeStampSecHolder holder) { fail("RepeatedTimeStampSec"); }
  public void copyAsValue(TimeStampSecWriter writer) { fail("CopyAsValueTimeStampSec"); }
  public void copyAsField(String name, TimeStampSecWriter writer) { fail("CopyAsFieldTimeStampSec"); }

  // ---- TimeStampMilli ----
  public void read(TimeStampMilliHolder holder) { fail("TimeStampMilli"); }
  public void read(NullableTimeStampMilliHolder holder) { fail("TimeStampMilli"); }
  public void read(int arrayIndex, TimeStampMilliHolder holder) { fail("RepeatedTimeStampMilli"); }
  public void read(int arrayIndex, NullableTimeStampMilliHolder holder) { fail("RepeatedTimeStampMilli"); }
  public void copyAsValue(TimeStampMilliWriter writer) { fail("CopyAsValueTimeStampMilli"); }
  public void copyAsField(String name, TimeStampMilliWriter writer) { fail("CopyAsFieldTimeStampMilli"); }

  // ---- TimeStampMicro ----
  public void read(TimeStampMicroHolder holder) { fail("TimeStampMicro"); }
  public void read(NullableTimeStampMicroHolder holder) { fail("TimeStampMicro"); }
  public void read(int arrayIndex, TimeStampMicroHolder holder) { fail("RepeatedTimeStampMicro"); }
  public void read(int arrayIndex, NullableTimeStampMicroHolder holder) { fail("RepeatedTimeStampMicro"); }
  public void copyAsValue(TimeStampMicroWriter writer) { fail("CopyAsValueTimeStampMicro"); }
  public void copyAsField(String name, TimeStampMicroWriter writer) { fail("CopyAsFieldTimeStampMicro"); }

  // ---- TimeStampNano ----
  public void read(TimeStampNanoHolder holder) { fail("TimeStampNano"); }
  public void read(NullableTimeStampNanoHolder holder) { fail("TimeStampNano"); }
  public void read(int arrayIndex, TimeStampNanoHolder holder) { fail("RepeatedTimeStampNano"); }
  public void read(int arrayIndex, NullableTimeStampNanoHolder holder) { fail("RepeatedTimeStampNano"); }
  public void copyAsValue(TimeStampNanoWriter writer) { fail("CopyAsValueTimeStampNano"); }
  public void copyAsField(String name, TimeStampNanoWriter writer) { fail("CopyAsFieldTimeStampNano"); }

  // ---- TimeStampSecTZ ----
  public void read(TimeStampSecTZHolder holder) { fail("TimeStampSecTZ"); }
  public void read(NullableTimeStampSecTZHolder holder) { fail("TimeStampSecTZ"); }
  public void read(int arrayIndex, TimeStampSecTZHolder holder) { fail("RepeatedTimeStampSecTZ"); }
  public void read(int arrayIndex, NullableTimeStampSecTZHolder holder) { fail("RepeatedTimeStampSecTZ"); }
  public void copyAsValue(TimeStampSecTZWriter writer) { fail("CopyAsValueTimeStampSecTZ"); }
  public void copyAsField(String name, TimeStampSecTZWriter writer) { fail("CopyAsFieldTimeStampSecTZ"); }

  // ---- TimeStampMilliTZ ----
  public void read(TimeStampMilliTZHolder holder) { fail("TimeStampMilliTZ"); }
  public void read(NullableTimeStampMilliTZHolder holder) { fail("TimeStampMilliTZ"); }
  public void read(int arrayIndex, TimeStampMilliTZHolder holder) { fail("RepeatedTimeStampMilliTZ"); }
  public void read(int arrayIndex, NullableTimeStampMilliTZHolder holder) { fail("RepeatedTimeStampMilliTZ"); }
  public void copyAsValue(TimeStampMilliTZWriter writer) { fail("CopyAsValueTimeStampMilliTZ"); }
  public void copyAsField(String name, TimeStampMilliTZWriter writer) { fail("CopyAsFieldTimeStampMilliTZ"); }

  // ---- TimeStampMicroTZ ----
  public void read(TimeStampMicroTZHolder holder) { fail("TimeStampMicroTZ"); }
  public void read(NullableTimeStampMicroTZHolder holder) { fail("TimeStampMicroTZ"); }
  public void read(int arrayIndex, TimeStampMicroTZHolder holder) { fail("RepeatedTimeStampMicroTZ"); }
  public void read(int arrayIndex, NullableTimeStampMicroTZHolder holder) { fail("RepeatedTimeStampMicroTZ"); }
  public void copyAsValue(TimeStampMicroTZWriter writer) { fail("CopyAsValueTimeStampMicroTZ"); }
  public void copyAsField(String name, TimeStampMicroTZWriter writer) { fail("CopyAsFieldTimeStampMicroTZ"); }

  // ---- TimeStampNanoTZ ----
  public void read(TimeStampNanoTZHolder holder) { fail("TimeStampNanoTZ"); }
  public void read(NullableTimeStampNanoTZHolder holder) { fail("TimeStampNanoTZ"); }
  public void read(int arrayIndex, TimeStampNanoTZHolder holder) { fail("RepeatedTimeStampNanoTZ"); }
  public void read(int arrayIndex, NullableTimeStampNanoTZHolder holder) { fail("RepeatedTimeStampNanoTZ"); }
  public void copyAsValue(TimeStampNanoTZWriter writer) { fail("CopyAsValueTimeStampNanoTZ"); }
  public void copyAsField(String name, TimeStampNanoTZWriter writer) { fail("CopyAsFieldTimeStampNanoTZ"); }

  // ---- TimeMicro ----
  public void read(TimeMicroHolder holder) { fail("TimeMicro"); }
  public void read(NullableTimeMicroHolder holder) { fail("TimeMicro"); }
  public void read(int arrayIndex, TimeMicroHolder holder) { fail("RepeatedTimeMicro"); }
  public void read(int arrayIndex, NullableTimeMicroHolder holder) { fail("RepeatedTimeMicro"); }
  public void copyAsValue(TimeMicroWriter writer) { fail("CopyAsValueTimeMicro"); }
  public void copyAsField(String name, TimeMicroWriter writer) { fail("CopyAsFieldTimeMicro"); }

  // ---- TimeNano ----
  public void read(TimeNanoHolder holder) { fail("TimeNano"); }
  public void read(NullableTimeNanoHolder holder) { fail("TimeNano"); }
  public void read(int arrayIndex, TimeNanoHolder holder) { fail("RepeatedTimeNano"); }
  public void read(int arrayIndex, NullableTimeNanoHolder holder) { fail("RepeatedTimeNano"); }
  public void copyAsValue(TimeNanoWriter writer) { fail("CopyAsValueTimeNano"); }
  public void copyAsField(String name, TimeNanoWriter writer) { fail("CopyAsFieldTimeNano"); }

  // ---- IntervalDay ----
  public void read(IntervalDayHolder holder) { fail("IntervalDay"); }
  public void read(NullableIntervalDayHolder holder) { fail("IntervalDay"); }
  public void read(int arrayIndex, IntervalDayHolder holder) { fail("RepeatedIntervalDay"); }
  public void read(int arrayIndex, NullableIntervalDayHolder holder) { fail("RepeatedIntervalDay"); }
  public void copyAsValue(IntervalDayWriter writer) { fail("CopyAsValueIntervalDay"); }
  public void copyAsField(String name, IntervalDayWriter writer) { fail("CopyAsFieldIntervalDay"); }

  // ---- IntervalMonthDayNano ----
  public void read(IntervalMonthDayNanoHolder holder) { fail("IntervalMonthDayNano"); }
  public void read(NullableIntervalMonthDayNanoHolder holder) { fail("IntervalMonthDayNano"); }
  public void read(int arrayIndex, IntervalMonthDayNanoHolder holder) { fail("RepeatedIntervalMonthDayNano"); }
  public void read(int arrayIndex, NullableIntervalMonthDayNanoHolder holder) { fail("RepeatedIntervalMonthDayNano"); }
  public void copyAsValue(IntervalMonthDayNanoWriter writer) { fail("CopyAsValueIntervalMonthDayNano"); }
  public void copyAsField(String name, IntervalMonthDayNanoWriter writer) { fail("CopyAsFieldIntervalMonthDayNano"); }

  // ---- Decimal256 ----
  public void read(Decimal256Holder holder) { fail("Decimal256"); }
  public void read(NullableDecimal256Holder holder) { fail("Decimal256"); }
  public void read(int arrayIndex, Decimal256Holder holder) { fail("RepeatedDecimal256"); }
  public void read(int arrayIndex, NullableDecimal256Holder holder) { fail("RepeatedDecimal256"); }
  public void copyAsValue(Decimal256Writer writer) { fail("CopyAsValueDecimal256"); }
  public void copyAsField(String name, Decimal256Writer writer) { fail("CopyAsFieldDecimal256"); }

  // ---- Decimal ----
  public void read(DecimalHolder holder) { fail("Decimal"); }
  public void read(NullableDecimalHolder holder) { fail("Decimal"); }
  public void read(int arrayIndex, DecimalHolder holder) { fail("RepeatedDecimal"); }
  public void read(int arrayIndex, NullableDecimalHolder holder) { fail("RepeatedDecimal"); }
  public void copyAsValue(DecimalWriter writer) { fail("CopyAsValueDecimal"); }
  public void copyAsField(String name, DecimalWriter writer) { fail("CopyAsFieldDecimal"); }

  // ---- FixedSizeBinary ----
  public void read(FixedSizeBinaryHolder holder) { fail("FixedSizeBinary"); }
  public void read(NullableFixedSizeBinaryHolder holder) { fail("FixedSizeBinary"); }
  public void read(int arrayIndex, FixedSizeBinaryHolder holder) { fail("RepeatedFixedSizeBinary"); }
  public void read(int arrayIndex, NullableFixedSizeBinaryHolder holder) { fail("RepeatedFixedSizeBinary"); }
  public void copyAsValue(FixedSizeBinaryWriter writer) { fail("CopyAsValueFixedSizeBinary"); }
  public void copyAsField(String name, FixedSizeBinaryWriter writer) { fail("CopyAsFieldFixedSizeBinary"); }

  // ---- VarBinary ----
  public void read(VarBinaryHolder holder) { fail("VarBinary"); }
  public void read(NullableVarBinaryHolder holder) { fail("VarBinary"); }
  public void read(int arrayIndex, VarBinaryHolder holder) { fail("RepeatedVarBinary"); }
  public void read(int arrayIndex, NullableVarBinaryHolder holder) { fail("RepeatedVarBinary"); }
  public void copyAsValue(VarBinaryWriter writer) { fail("CopyAsValueVarBinary"); }
  public void copyAsField(String name, VarBinaryWriter writer) { fail("CopyAsFieldVarBinary"); }

  // ---- VarChar ----
  public void read(VarCharHolder holder) { fail("VarChar"); }
  public void read(NullableVarCharHolder holder) { fail("VarChar"); }
  public void read(int arrayIndex, VarCharHolder holder) { fail("RepeatedVarChar"); }
  public void read(int arrayIndex, NullableVarCharHolder holder) { fail("RepeatedVarChar"); }
  public void copyAsValue(VarCharWriter writer) { fail("CopyAsValueVarChar"); }
  public void copyAsField(String name, VarCharWriter writer) { fail("CopyAsFieldVarChar"); }

  // ---- LargeVarChar ----
  public void read(LargeVarCharHolder holder) { fail("LargeVarChar"); }
  public void read(NullableLargeVarCharHolder holder) { fail("LargeVarChar"); }
  public void read(int arrayIndex, LargeVarCharHolder holder) { fail("RepeatedLargeVarChar"); }
  public void read(int arrayIndex, NullableLargeVarCharHolder holder) { fail("RepeatedLargeVarChar"); }
  public void copyAsValue(LargeVarCharWriter writer) { fail("CopyAsValueLargeVarChar"); }
  public void copyAsField(String name, LargeVarCharWriter writer) { fail("CopyAsFieldLargeVarChar"); }

  // ---- LargeVarBinary ----
  public void read(LargeVarBinaryHolder holder) { fail("LargeVarBinary"); }
  public void read(NullableLargeVarBinaryHolder holder) { fail("LargeVarBinary"); }
  public void read(int arrayIndex, LargeVarBinaryHolder holder) { fail("RepeatedLargeVarBinary"); }
  public void read(int arrayIndex, NullableLargeVarBinaryHolder holder) { fail("RepeatedLargeVarBinary"); }
  public void copyAsValue(LargeVarBinaryWriter writer) { fail("CopyAsValueLargeVarBinary"); }
  public void copyAsField(String name, LargeVarBinaryWriter writer) { fail("CopyAsFieldLargeVarBinary"); }

  // ---- Bit ----
  public void read(BitHolder holder) { fail("Bit"); }
  public void read(NullableBitHolder holder) { fail("Bit"); }
  public void read(int arrayIndex, BitHolder holder) { fail("RepeatedBit"); }
  public void read(int arrayIndex, NullableBitHolder holder) { fail("RepeatedBit"); }
  public void copyAsValue(BitWriter writer) { fail("CopyAsValueBit"); }
  public void copyAsField(String name, BitWriter writer) { fail("CopyAsFieldBit"); }

  // ---- Nested-reader navigation -----------------------------------------
  public FieldReader reader(String name) {
    fail("reader(String name)");
    return null;
  }

  public FieldReader reader() {
    fail("reader()");
    return null;
  }

  // -1 is unreachable: fail() throws first.
  public int size() {
    fail("size()");
    return -1;
  }

  // Central error helper: every unsupported accessor funnels here so the
  // exception names both the attempted access and the concrete reader class.
  private void fail(String name) {
    throw new IllegalArgumentException(String.format("You tried to read a [%s] type when you are using a field reader of type [%s].", name, this.getClass().getSimpleName()));
  }
}



diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/AbstractFieldWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/AbstractFieldWriter.java
new file mode 100644
index 000000000000..9f3bb4565ba3
--- /dev/null
+++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/AbstractFieldWriter.java
@@ -0,0 +1,1258 @@
+
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/* + * This class is generated using freemarker and the AbstractFieldWriter.java template. + * Note that changes to the AbstractFieldWriter template should also get reflected in the + * AbstractPromotableFieldWriter, ComplexWriters, UnionFixedSizeListWriter, UnionListWriter + * and UnionWriter templates and the PromotableWriter concrete code. 
+ */ +@SuppressWarnings("unused") +abstract class AbstractFieldWriter extends AbstractBaseWriter implements FieldWriter { + + protected boolean addVectorAsNullable = true; + + /** + * Set flag to control the FieldType.nullable property when a writer creates a new vector. + * If true then vectors created will be nullable, this is the default behavior. If false then + * vectors created will be non-nullable. + * + * @param nullable Whether or not to create nullable vectors (default behavior is true) + */ + public void setAddVectorAsNullable(boolean nullable) { + addVectorAsNullable = nullable; + } + + @Override + public void start() { + throw new IllegalStateException(String.format("You tried to start when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void end() { + throw new IllegalStateException(String.format("You tried to end when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void startList() { + throw new IllegalStateException(String.format("You tried to start a list when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void endList() { + throw new IllegalStateException(String.format("You tried to end a list when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void startMap() { + throw new IllegalStateException(String.format("You tried to start a map when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void endMap() { + throw new IllegalStateException(String.format("You tried to end a map when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void startEntry() { + throw new IllegalStateException(String.format("You tried to start a map entry when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + 
@Override + public MapWriter key() { + throw new IllegalStateException(String.format("You tried to start a map key when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public MapWriter value() { + throw new IllegalStateException(String.format("You tried to start a map value when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void endEntry() { + throw new IllegalStateException(String.format("You tried to end a map entry when you are using a ValueWriter of type %s.", this.getClass().getSimpleName())); + } + + @Override + public void write(TinyIntHolder holder) { + fail("TinyInt"); + } + + public void writeTinyInt(byte value) { + fail("TinyInt"); + } + + + + + @Override + public void write(UInt1Holder holder) { + fail("UInt1"); + } + + public void writeUInt1(byte value) { + fail("UInt1"); + } + + + + + @Override + public void write(UInt2Holder holder) { + fail("UInt2"); + } + + public void writeUInt2(char value) { + fail("UInt2"); + } + + + + + @Override + public void write(SmallIntHolder holder) { + fail("SmallInt"); + } + + public void writeSmallInt(short value) { + fail("SmallInt"); + } + + + + + @Override + public void write(Float2Holder holder) { + fail("Float2"); + } + + public void writeFloat2(short value) { + fail("Float2"); + } + + + + + @Override + public void write(IntHolder holder) { + fail("Int"); + } + + public void writeInt(int value) { + fail("Int"); + } + + + + + @Override + public void write(UInt4Holder holder) { + fail("UInt4"); + } + + public void writeUInt4(int value) { + fail("UInt4"); + } + + + + + @Override + public void write(Float4Holder holder) { + fail("Float4"); + } + + public void writeFloat4(float value) { + fail("Float4"); + } + + + + + @Override + public void write(DateDayHolder holder) { + fail("DateDay"); + } + + public void writeDateDay(int value) { + fail("DateDay"); + } + + + + + @Override + public void 
write(IntervalYearHolder holder) { + fail("IntervalYear"); + } + + public void writeIntervalYear(int value) { + fail("IntervalYear"); + } + + + + + @Override + public void write(TimeSecHolder holder) { + fail("TimeSec"); + } + + public void writeTimeSec(int value) { + fail("TimeSec"); + } + + + + + @Override + public void write(TimeMilliHolder holder) { + fail("TimeMilli"); + } + + public void writeTimeMilli(int value) { + fail("TimeMilli"); + } + + + + + @Override + public void write(BigIntHolder holder) { + fail("BigInt"); + } + + public void writeBigInt(long value) { + fail("BigInt"); + } + + + + + @Override + public void write(UInt8Holder holder) { + fail("UInt8"); + } + + public void writeUInt8(long value) { + fail("UInt8"); + } + + + + + @Override + public void write(Float8Holder holder) { + fail("Float8"); + } + + public void writeFloat8(double value) { + fail("Float8"); + } + + + + + @Override + public void write(DateMilliHolder holder) { + fail("DateMilli"); + } + + public void writeDateMilli(long value) { + fail("DateMilli"); + } + + + + + @Override + public void write(DurationHolder holder) { + fail("Duration"); + } + + public void writeDuration(long value) { + fail("Duration"); + } + + + + + @Override + public void write(TimeStampSecHolder holder) { + fail("TimeStampSec"); + } + + public void writeTimeStampSec(long value) { + fail("TimeStampSec"); + } + + + + + @Override + public void write(TimeStampMilliHolder holder) { + fail("TimeStampMilli"); + } + + public void writeTimeStampMilli(long value) { + fail("TimeStampMilli"); + } + + + + + @Override + public void write(TimeStampMicroHolder holder) { + fail("TimeStampMicro"); + } + + public void writeTimeStampMicro(long value) { + fail("TimeStampMicro"); + } + + + + + @Override + public void write(TimeStampNanoHolder holder) { + fail("TimeStampNano"); + } + + public void writeTimeStampNano(long value) { + fail("TimeStampNano"); + } + + + + + @Override + public void write(TimeStampSecTZHolder holder) { + 
fail("TimeStampSecTZ"); + } + + public void writeTimeStampSecTZ(long value) { + fail("TimeStampSecTZ"); + } + + + + + @Override + public void write(TimeStampMilliTZHolder holder) { + fail("TimeStampMilliTZ"); + } + + public void writeTimeStampMilliTZ(long value) { + fail("TimeStampMilliTZ"); + } + + + + + @Override + public void write(TimeStampMicroTZHolder holder) { + fail("TimeStampMicroTZ"); + } + + public void writeTimeStampMicroTZ(long value) { + fail("TimeStampMicroTZ"); + } + + + + + @Override + public void write(TimeStampNanoTZHolder holder) { + fail("TimeStampNanoTZ"); + } + + public void writeTimeStampNanoTZ(long value) { + fail("TimeStampNanoTZ"); + } + + + + + @Override + public void write(TimeMicroHolder holder) { + fail("TimeMicro"); + } + + public void writeTimeMicro(long value) { + fail("TimeMicro"); + } + + + + + @Override + public void write(TimeNanoHolder holder) { + fail("TimeNano"); + } + + public void writeTimeNano(long value) { + fail("TimeNano"); + } + + + + + @Override + public void write(IntervalDayHolder holder) { + fail("IntervalDay"); + } + + public void writeIntervalDay(int days, int milliseconds) { + fail("IntervalDay"); + } + + + + + @Override + public void write(IntervalMonthDayNanoHolder holder) { + fail("IntervalMonthDayNano"); + } + + public void writeIntervalMonthDayNano(int months, int days, long nanoseconds) { + fail("IntervalMonthDayNano"); + } + + + + + @Override + public void write(Decimal256Holder holder) { + fail("Decimal256"); + } + + public void writeDecimal256(long start, ArrowBuf buffer) { + fail("Decimal256"); + } + + public void writeDecimal256(BigDecimal value) { + fail("Decimal256"); + } + + public void writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType) { + fail("Decimal256"); + } + + public void writeBigEndianBytesToDecimal256(byte[] value) { + fail("Decimal256"); + } + + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType) { + fail("Decimal256"); + } + + + + @Override + 
public void write(DecimalHolder holder) { + fail("Decimal"); + } + + public void writeDecimal(long start, ArrowBuf buffer) { + fail("Decimal"); + } + + public void writeDecimal(BigDecimal value) { + fail("Decimal"); + } + + public void writeDecimal(long start, ArrowBuf buffer, ArrowType arrowType) { + fail("Decimal"); + } + + public void writeBigEndianBytesToDecimal(byte[] value) { + fail("Decimal"); + } + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType) { + fail("Decimal"); + } + + + + @Override + public void write(FixedSizeBinaryHolder holder) { + fail("FixedSizeBinary"); + } + + public void writeFixedSizeBinary(ArrowBuf buffer) { + fail("FixedSizeBinary"); + } + + + + + @Override + public void write(VarBinaryHolder holder) { + fail("VarBinary"); + } + + public void writeVarBinary(int start, int end, ArrowBuf buffer) { + fail("VarBinary"); + } + + + public void writeVarBinary(byte[] value) { + fail("VarBinary"); + } + + public void writeVarBinary(byte[] value, int offset, int length) { + fail("VarBinary"); + } + + public void writeVarBinary(ByteBuffer value) { + fail("VarBinary"); + } + + public void writeVarBinary(ByteBuffer value, int offset, int length) { + fail("VarBinary"); + } + + + @Override + public void write(VarCharHolder holder) { + fail("VarChar"); + } + + public void writeVarChar(int start, int end, ArrowBuf buffer) { + fail("VarChar"); + } + + + + public void writeVarChar(Text value) { + fail("VarChar"); + } + + public void writeVarChar(String value) { + fail("VarChar"); + } + + @Override + public void write(LargeVarCharHolder holder) { + fail("LargeVarChar"); + } + + public void writeLargeVarChar(long start, long end, ArrowBuf buffer) { + fail("LargeVarChar"); + } + + + + public void writeLargeVarChar(Text value) { + fail("LargeVarChar"); + } + + public void writeLargeVarChar(String value) { + fail("LargeVarChar"); + } + + @Override + public void write(LargeVarBinaryHolder holder) { + fail("LargeVarBinary"); + } + + 
public void writeLargeVarBinary(long start, long end, ArrowBuf buffer) { + fail("LargeVarBinary"); + } + + + public void writeLargeVarBinary(byte[] value) { + fail("LargeVarBinary"); + } + + public void writeLargeVarBinary(byte[] value, int offset, int length) { + fail("LargeVarBinary"); + } + + public void writeLargeVarBinary(ByteBuffer value) { + fail("LargeVarBinary"); + } + + public void writeLargeVarBinary(ByteBuffer value, int offset, int length) { + fail("LargeVarBinary"); + } + + + @Override + public void write(BitHolder holder) { + fail("Bit"); + } + + public void writeBit(int value) { + fail("Bit"); + } + + + + + + public void writeNull() { + fail("Bit"); + } + + /** + * This implementation returns {@code false}. + *

+ * Must be overridden by struct writers. + *

+ */ + @Override + public boolean isEmptyStruct() { + return false; + } + + @Override + public StructWriter struct() { + fail("Struct"); + return null; + } + + @Override + public ListWriter list() { + fail("List"); + return null; + } + + @Override + public MapWriter map() { + fail("Map"); + return null; + } + + @Override + public StructWriter struct(String name) { + fail("Struct"); + return null; + } + + @Override + public ListWriter list(String name) { + fail("List"); + return null; + } + + @Override + public MapWriter map(String name) { + fail("Map"); + return null; + } + + @Override + public MapWriter map(boolean keysSorted) { + fail("Map"); + return null; + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + fail("Map"); + return null; + } + + @Override + public TinyIntWriter tinyInt(String name) { + fail("TinyInt"); + return null; + } + + @Override + public TinyIntWriter tinyInt() { + fail("TinyInt"); + return null; + } + + + @Override + public UInt1Writer uInt1(String name) { + fail("UInt1"); + return null; + } + + @Override + public UInt1Writer uInt1() { + fail("UInt1"); + return null; + } + + + @Override + public UInt2Writer uInt2(String name) { + fail("UInt2"); + return null; + } + + @Override + public UInt2Writer uInt2() { + fail("UInt2"); + return null; + } + + + @Override + public SmallIntWriter smallInt(String name) { + fail("SmallInt"); + return null; + } + + @Override + public SmallIntWriter smallInt() { + fail("SmallInt"); + return null; + } + + + @Override + public Float2Writer float2(String name) { + fail("Float2"); + return null; + } + + @Override + public Float2Writer float2() { + fail("Float2"); + return null; + } + + + @Override + public IntWriter integer(String name) { + fail("Int"); + return null; + } + + @Override + public IntWriter integer() { + fail("Int"); + return null; + } + + + @Override + public UInt4Writer uInt4(String name) { + fail("UInt4"); + return null; + } + + @Override + public UInt4Writer uInt4() { + 
fail("UInt4"); + return null; + } + + + @Override + public Float4Writer float4(String name) { + fail("Float4"); + return null; + } + + @Override + public Float4Writer float4() { + fail("Float4"); + return null; + } + + + @Override + public DateDayWriter dateDay(String name) { + fail("DateDay"); + return null; + } + + @Override + public DateDayWriter dateDay() { + fail("DateDay"); + return null; + } + + + @Override + public IntervalYearWriter intervalYear(String name) { + fail("IntervalYear"); + return null; + } + + @Override + public IntervalYearWriter intervalYear() { + fail("IntervalYear"); + return null; + } + + + @Override + public TimeSecWriter timeSec(String name) { + fail("TimeSec"); + return null; + } + + @Override + public TimeSecWriter timeSec() { + fail("TimeSec"); + return null; + } + + + @Override + public TimeMilliWriter timeMilli(String name) { + fail("TimeMilli"); + return null; + } + + @Override + public TimeMilliWriter timeMilli() { + fail("TimeMilli"); + return null; + } + + + @Override + public BigIntWriter bigInt(String name) { + fail("BigInt"); + return null; + } + + @Override + public BigIntWriter bigInt() { + fail("BigInt"); + return null; + } + + + @Override + public UInt8Writer uInt8(String name) { + fail("UInt8"); + return null; + } + + @Override + public UInt8Writer uInt8() { + fail("UInt8"); + return null; + } + + + @Override + public Float8Writer float8(String name) { + fail("Float8"); + return null; + } + + @Override + public Float8Writer float8() { + fail("Float8"); + return null; + } + + + @Override + public DateMilliWriter dateMilli(String name) { + fail("DateMilli"); + return null; + } + + @Override + public DateMilliWriter dateMilli() { + fail("DateMilli"); + return null; + } + + + @Override + public DurationWriter duration(String name, org.apache.arrow.vector.types.TimeUnit unit) { + fail("Duration(" + "unit: " + unit + ", " + ")"); + return null; + } + + @Override + public DurationWriter duration(String name) { + 
fail("Duration"); + return null; + } + + @Override + public DurationWriter duration() { + fail("Duration"); + return null; + } + + + @Override + public TimeStampSecWriter timeStampSec(String name) { + fail("TimeStampSec"); + return null; + } + + @Override + public TimeStampSecWriter timeStampSec() { + fail("TimeStampSec"); + return null; + } + + + @Override + public TimeStampMilliWriter timeStampMilli(String name) { + fail("TimeStampMilli"); + return null; + } + + @Override + public TimeStampMilliWriter timeStampMilli() { + fail("TimeStampMilli"); + return null; + } + + + @Override + public TimeStampMicroWriter timeStampMicro(String name) { + fail("TimeStampMicro"); + return null; + } + + @Override + public TimeStampMicroWriter timeStampMicro() { + fail("TimeStampMicro"); + return null; + } + + + @Override + public TimeStampNanoWriter timeStampNano(String name) { + fail("TimeStampNano"); + return null; + } + + @Override + public TimeStampNanoWriter timeStampNano() { + fail("TimeStampNano"); + return null; + } + + + @Override + public TimeStampSecTZWriter timeStampSecTZ(String name, String timezone) { + fail("TimeStampSecTZ(" + "timezone: " + timezone + ", " + ")"); + return null; + } + + @Override + public TimeStampSecTZWriter timeStampSecTZ(String name) { + fail("TimeStampSecTZ"); + return null; + } + + @Override + public TimeStampSecTZWriter timeStampSecTZ() { + fail("TimeStampSecTZ"); + return null; + } + + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name, String timezone) { + fail("TimeStampMilliTZ(" + "timezone: " + timezone + ", " + ")"); + return null; + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name) { + fail("TimeStampMilliTZ"); + return null; + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ() { + fail("TimeStampMilliTZ"); + return null; + } + + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ(String name, String timezone) { + fail("TimeStampMicroTZ(" + "timezone: " + 
timezone + ", " + ")"); + return null; + } + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ(String name) { + fail("TimeStampMicroTZ"); + return null; + } + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ() { + fail("TimeStampMicroTZ"); + return null; + } + + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name, String timezone) { + fail("TimeStampNanoTZ(" + "timezone: " + timezone + ", " + ")"); + return null; + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name) { + fail("TimeStampNanoTZ"); + return null; + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ() { + fail("TimeStampNanoTZ"); + return null; + } + + + @Override + public TimeMicroWriter timeMicro(String name) { + fail("TimeMicro"); + return null; + } + + @Override + public TimeMicroWriter timeMicro() { + fail("TimeMicro"); + return null; + } + + + @Override + public TimeNanoWriter timeNano(String name) { + fail("TimeNano"); + return null; + } + + @Override + public TimeNanoWriter timeNano() { + fail("TimeNano"); + return null; + } + + + @Override + public IntervalDayWriter intervalDay(String name) { + fail("IntervalDay"); + return null; + } + + @Override + public IntervalDayWriter intervalDay() { + fail("IntervalDay"); + return null; + } + + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano(String name) { + fail("IntervalMonthDayNano"); + return null; + } + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano() { + fail("IntervalMonthDayNano"); + return null; + } + + + @Override + public Decimal256Writer decimal256(String name, int scale, int precision) { + fail("Decimal256(" + "scale: " + scale + ", " + "precision: " + precision + ", " + ")"); + return null; + } + + @Override + public Decimal256Writer decimal256(String name) { + fail("Decimal256"); + return null; + } + + @Override + public Decimal256Writer decimal256() { + fail("Decimal256"); + return null; + } + + + @Override + public 
DecimalWriter decimal(String name, int scale, int precision) { + fail("Decimal(" + "scale: " + scale + ", " + "precision: " + precision + ", " + ")"); + return null; + } + + @Override + public DecimalWriter decimal(String name) { + fail("Decimal"); + return null; + } + + @Override + public DecimalWriter decimal() { + fail("Decimal"); + return null; + } + + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name, int byteWidth) { + fail("FixedSizeBinary(" + "byteWidth: " + byteWidth + ", " + ")"); + return null; + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name) { + fail("FixedSizeBinary"); + return null; + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary() { + fail("FixedSizeBinary"); + return null; + } + + + @Override + public VarBinaryWriter varBinary(String name) { + fail("VarBinary"); + return null; + } + + @Override + public VarBinaryWriter varBinary() { + fail("VarBinary"); + return null; + } + + + @Override + public VarCharWriter varChar(String name) { + fail("VarChar"); + return null; + } + + @Override + public VarCharWriter varChar() { + fail("VarChar"); + return null; + } + + + @Override + public LargeVarCharWriter largeVarChar(String name) { + fail("LargeVarChar"); + return null; + } + + @Override + public LargeVarCharWriter largeVarChar() { + fail("LargeVarChar"); + return null; + } + + + @Override + public LargeVarBinaryWriter largeVarBinary(String name) { + fail("LargeVarBinary"); + return null; + } + + @Override + public LargeVarBinaryWriter largeVarBinary() { + fail("LargeVarBinary"); + return null; + } + + + @Override + public BitWriter bit(String name) { + fail("Bit"); + return null; + } + + @Override + public BitWriter bit() { + fail("Bit"); + return null; + } + + + public void copyReader(FieldReader reader) { + fail("Copy FieldReader"); + } + + public void copyReaderToField(String name, FieldReader reader) { + fail("Copy FieldReader to STring"); + } + + private void fail(String name) { + 
throw new IllegalArgumentException(String.format("You tried to write a %s type when you are using a ValueWriter of type %s.", name, this.getClass().getSimpleName())); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/AbstractPromotableFieldWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/AbstractPromotableFieldWriter.java new file mode 100644 index 000000000000..5970153eea8f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/AbstractPromotableFieldWriter.java @@ -0,0 +1,1161 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * A FieldWriter which delegates calls to another FieldWriter. The delegate FieldWriter can be promoted to a new type + * when necessary. Classes that extend this class are responsible for handling promotion. + * + * This class is generated using freemarker and the AbstractPromotableFieldWriter.java template. 
+ * + */ +@SuppressWarnings("unused") +abstract class AbstractPromotableFieldWriter extends AbstractFieldWriter { + /** + * Retrieve the FieldWriter, promoting if it is not a FieldWriter of the specified type + * @param type the type of the values we want to write + * @return the corresponding field writer + */ + protected FieldWriter getWriter(MinorType type) { + return getWriter(type, null); + } + + abstract protected FieldWriter getWriter(MinorType type, ArrowType arrowType); + + /** + * @return the current FieldWriter + */ + abstract protected FieldWriter getWriter(); + + @Override + public void start() { + getWriter(MinorType.STRUCT).start(); + } + + @Override + public void end() { + getWriter(MinorType.STRUCT).end(); + setPosition(idx() + 1); + } + + @Override + public void startList() { + getWriter(MinorType.LIST).startList(); + } + + @Override + public void endList() { + getWriter(MinorType.LIST).endList(); + setPosition(idx() + 1); + } + + @Override + public void startMap() { + getWriter(MinorType.MAP).startMap(); + } + + @Override + public void endMap() { + getWriter(MinorType.MAP).endMap(); + setPosition(idx() + 1); + } + + @Override + public void startEntry() { + getWriter(MinorType.MAP).startEntry(); + } + + @Override + public MapWriter key() { + return getWriter(MinorType.MAP).key(); + } + + @Override + public MapWriter value() { + return getWriter(MinorType.MAP).value(); + } + + @Override + public void endEntry() { + getWriter(MinorType.MAP).endEntry(); + } + + @Override + public void write(TinyIntHolder holder) { + getWriter(MinorType.TINYINT).write(holder); + } + + public void writeTinyInt(byte value) { + getWriter(MinorType.TINYINT).writeTinyInt(value); + } + + + @Override + public void write(UInt1Holder holder) { + getWriter(MinorType.UINT1).write(holder); + } + + public void writeUInt1(byte value) { + getWriter(MinorType.UINT1).writeUInt1(value); + } + + + @Override + public void write(UInt2Holder holder) { + 
getWriter(MinorType.UINT2).write(holder); + } + + public void writeUInt2(char value) { + getWriter(MinorType.UINT2).writeUInt2(value); + } + + + @Override + public void write(SmallIntHolder holder) { + getWriter(MinorType.SMALLINT).write(holder); + } + + public void writeSmallInt(short value) { + getWriter(MinorType.SMALLINT).writeSmallInt(value); + } + + + @Override + public void write(Float2Holder holder) { + getWriter(MinorType.FLOAT2).write(holder); + } + + public void writeFloat2(short value) { + getWriter(MinorType.FLOAT2).writeFloat2(value); + } + + + @Override + public void write(IntHolder holder) { + getWriter(MinorType.INT).write(holder); + } + + public void writeInt(int value) { + getWriter(MinorType.INT).writeInt(value); + } + + + @Override + public void write(UInt4Holder holder) { + getWriter(MinorType.UINT4).write(holder); + } + + public void writeUInt4(int value) { + getWriter(MinorType.UINT4).writeUInt4(value); + } + + + @Override + public void write(Float4Holder holder) { + getWriter(MinorType.FLOAT4).write(holder); + } + + public void writeFloat4(float value) { + getWriter(MinorType.FLOAT4).writeFloat4(value); + } + + + @Override + public void write(DateDayHolder holder) { + getWriter(MinorType.DATEDAY).write(holder); + } + + public void writeDateDay(int value) { + getWriter(MinorType.DATEDAY).writeDateDay(value); + } + + + @Override + public void write(IntervalYearHolder holder) { + getWriter(MinorType.INTERVALYEAR).write(holder); + } + + public void writeIntervalYear(int value) { + getWriter(MinorType.INTERVALYEAR).writeIntervalYear(value); + } + + + @Override + public void write(TimeSecHolder holder) { + getWriter(MinorType.TIMESEC).write(holder); + } + + public void writeTimeSec(int value) { + getWriter(MinorType.TIMESEC).writeTimeSec(value); + } + + + @Override + public void write(TimeMilliHolder holder) { + getWriter(MinorType.TIMEMILLI).write(holder); + } + + public void writeTimeMilli(int value) { + 
getWriter(MinorType.TIMEMILLI).writeTimeMilli(value); + } + + + @Override + public void write(BigIntHolder holder) { + getWriter(MinorType.BIGINT).write(holder); + } + + public void writeBigInt(long value) { + getWriter(MinorType.BIGINT).writeBigInt(value); + } + + + @Override + public void write(UInt8Holder holder) { + getWriter(MinorType.UINT8).write(holder); + } + + public void writeUInt8(long value) { + getWriter(MinorType.UINT8).writeUInt8(value); + } + + + @Override + public void write(Float8Holder holder) { + getWriter(MinorType.FLOAT8).write(holder); + } + + public void writeFloat8(double value) { + getWriter(MinorType.FLOAT8).writeFloat8(value); + } + + + @Override + public void write(DateMilliHolder holder) { + getWriter(MinorType.DATEMILLI).write(holder); + } + + public void writeDateMilli(long value) { + getWriter(MinorType.DATEMILLI).writeDateMilli(value); + } + + + @Override + public void write(DurationHolder holder) { + ArrowType.Duration arrowType = new ArrowType.Duration(holder.unit); + getWriter(MinorType.DURATION, arrowType).write(holder); + } + + /** + * @deprecated + * If you experience errors with using this version of the method, switch to the holder version. + * The errors occur when using an untyped or unioned PromotableWriter, because this version of the + * method does not have enough information to infer the ArrowType. 
+ * @see #write(DurationHolder) + */ + @Deprecated + @Override + public void writeDuration(long value) { + getWriter(MinorType.DURATION).writeDuration(value); + } + + + @Override + public void write(TimeStampSecHolder holder) { + getWriter(MinorType.TIMESTAMPSEC).write(holder); + } + + public void writeTimeStampSec(long value) { + getWriter(MinorType.TIMESTAMPSEC).writeTimeStampSec(value); + } + + + @Override + public void write(TimeStampMilliHolder holder) { + getWriter(MinorType.TIMESTAMPMILLI).write(holder); + } + + public void writeTimeStampMilli(long value) { + getWriter(MinorType.TIMESTAMPMILLI).writeTimeStampMilli(value); + } + + + @Override + public void write(TimeStampMicroHolder holder) { + getWriter(MinorType.TIMESTAMPMICRO).write(holder); + } + + public void writeTimeStampMicro(long value) { + getWriter(MinorType.TIMESTAMPMICRO).writeTimeStampMicro(value); + } + + + @Override + public void write(TimeStampNanoHolder holder) { + getWriter(MinorType.TIMESTAMPNANO).write(holder); + } + + public void writeTimeStampNano(long value) { + getWriter(MinorType.TIMESTAMPNANO).writeTimeStampNano(value); + } + + + @Override + public void write(TimeStampSecTZHolder holder) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPSEC.getType(); + // Take the holder.timezone similar to how PromotableWriter.java:write(DecimalHolder) takes the scale from the holder. + ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone); + getWriter(MinorType.TIMESTAMPSECTZ, arrowType).write(holder); + } + + /** + * @deprecated + * The holder version should be used instead otherwise the timezone will default to UTC. 
+ * @see #write(TimeStampSecTZHolder) + */ + @Deprecated + @Override + public void writeTimeStampSecTZ(long value) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPSEC.getType(); + // Assumes UTC if no timezone is provided + ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC"); + getWriter(MinorType.TIMESTAMPSECTZ, arrowType).writeTimeStampSecTZ(value); + } + + + @Override + public void write(TimeStampMilliTZHolder holder) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPMILLI.getType(); + // Take the holder.timezone similar to how PromotableWriter.java:write(DecimalHolder) takes the scale from the holder. + ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone); + getWriter(MinorType.TIMESTAMPMILLITZ, arrowType).write(holder); + } + + /** + * @deprecated + * The holder version should be used instead otherwise the timezone will default to UTC. + * @see #write(TimeStampMilliTZHolder) + */ + @Deprecated + @Override + public void writeTimeStampMilliTZ(long value) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPMILLI.getType(); + // Assumes UTC if no timezone is provided + ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC"); + getWriter(MinorType.TIMESTAMPMILLITZ, arrowType).writeTimeStampMilliTZ(value); + } + + + @Override + public void write(TimeStampMicroTZHolder holder) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPMICRO.getType(); + // Take the holder.timezone similar to how PromotableWriter.java:write(DecimalHolder) takes the scale from the holder. 
+ ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone); + getWriter(MinorType.TIMESTAMPMICROTZ, arrowType).write(holder); + } + + /** + * @deprecated + * The holder version should be used instead otherwise the timezone will default to UTC. + * @see #write(TimeStampMicroTZHolder) + */ + @Deprecated + @Override + public void writeTimeStampMicroTZ(long value) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPMICRO.getType(); + // Assumes UTC if no timezone is provided + ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC"); + getWriter(MinorType.TIMESTAMPMICROTZ, arrowType).writeTimeStampMicroTZ(value); + } + + + @Override + public void write(TimeStampNanoTZHolder holder) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPNANO.getType(); + // Take the holder.timezone similar to how PromotableWriter.java:write(DecimalHolder) takes the scale from the holder. + ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone); + getWriter(MinorType.TIMESTAMPNANOTZ, arrowType).write(holder); + } + + /** + * @deprecated + * The holder version should be used instead otherwise the timezone will default to UTC. 
+ * @see #write(TimeStampNanoTZHolder) + */ + @Deprecated + @Override + public void writeTimeStampNanoTZ(long value) { + ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPNANO.getType(); + // Assumes UTC if no timezone is provided + ArrowType.Timestamp arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC"); + getWriter(MinorType.TIMESTAMPNANOTZ, arrowType).writeTimeStampNanoTZ(value); + } + + + @Override + public void write(TimeMicroHolder holder) { + getWriter(MinorType.TIMEMICRO).write(holder); + } + + public void writeTimeMicro(long value) { + getWriter(MinorType.TIMEMICRO).writeTimeMicro(value); + } + + + @Override + public void write(TimeNanoHolder holder) { + getWriter(MinorType.TIMENANO).write(holder); + } + + public void writeTimeNano(long value) { + getWriter(MinorType.TIMENANO).writeTimeNano(value); + } + + + @Override + public void write(IntervalDayHolder holder) { + getWriter(MinorType.INTERVALDAY).write(holder); + } + + public void writeIntervalDay(int days, int milliseconds) { + getWriter(MinorType.INTERVALDAY).writeIntervalDay(days, milliseconds); + } + + + @Override + public void write(IntervalMonthDayNanoHolder holder) { + getWriter(MinorType.INTERVALMONTHDAYNANO).write(holder); + } + + public void writeIntervalMonthDayNano(int months, int days, long nanoseconds) { + getWriter(MinorType.INTERVALMONTHDAYNANO).writeIntervalMonthDayNano(months, days, nanoseconds); + } + + + @Override + public void write(Decimal256Holder holder) { + getWriter(MinorType.DECIMAL256).write(holder); + } + + public void writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType) { + getWriter(MinorType.DECIMAL256).writeDecimal256(start, buffer, arrowType); + } + + public void writeDecimal256(long start, ArrowBuf buffer) { + getWriter(MinorType.DECIMAL256).writeDecimal256(start, buffer); + } + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType) { + 
getWriter(MinorType.DECIMAL256).writeBigEndianBytesToDecimal256(value, arrowType); + } + + public void writeBigEndianBytesToDecimal256(byte[] value) { + getWriter(MinorType.DECIMAL256).writeBigEndianBytesToDecimal256(value); + } + + + @Override + public void write(DecimalHolder holder) { + getWriter(MinorType.DECIMAL).write(holder); + } + + public void writeDecimal(int start, ArrowBuf buffer, ArrowType arrowType) { + getWriter(MinorType.DECIMAL).writeDecimal(start, buffer, arrowType); + } + + public void writeDecimal(int start, ArrowBuf buffer) { + getWriter(MinorType.DECIMAL).writeDecimal(start, buffer); + } + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType) { + getWriter(MinorType.DECIMAL).writeBigEndianBytesToDecimal(value, arrowType); + } + + public void writeBigEndianBytesToDecimal(byte[] value) { + getWriter(MinorType.DECIMAL).writeBigEndianBytesToDecimal(value); + } + + + @Override + public void write(FixedSizeBinaryHolder holder) { + ArrowType.FixedSizeBinary arrowType = new ArrowType.FixedSizeBinary(holder.byteWidth); + getWriter(MinorType.FIXEDSIZEBINARY, arrowType).write(holder); + } + + /** + * @deprecated + * If you experience errors with using this version of the method, switch to the holder version. + * The errors occur when using an untyped or unioned PromotableWriter, because this version of the + * method does not have enough information to infer the ArrowType. 
+ * @see #write(FixedSizeBinaryHolder) + */ + @Deprecated + @Override + public void writeFixedSizeBinary(ArrowBuf buffer) { + getWriter(MinorType.FIXEDSIZEBINARY).writeFixedSizeBinary(buffer); + } + + + @Override + public void write(VarBinaryHolder holder) { + getWriter(MinorType.VARBINARY).write(holder); + } + + public void writeVarBinary(int start, int end, ArrowBuf buffer) { + getWriter(MinorType.VARBINARY).writeVarBinary(start, end, buffer); + } + + @Override + public void writeVarBinary(byte[] value) { + getWriter(MinorType.VARBINARY).writeVarBinary(value); + } + + @Override + public void writeVarBinary(byte[] value, int offset, int length) { + getWriter(MinorType.VARBINARY).writeVarBinary(value, offset, length); + } + + @Override + public void writeVarBinary(ByteBuffer value) { + getWriter(MinorType.VARBINARY).writeVarBinary(value); + } + + @Override + public void writeVarBinary(ByteBuffer value, int offset, int length) { + getWriter(MinorType.VARBINARY).writeVarBinary(value, offset, length); + } + + @Override + public void write(VarCharHolder holder) { + getWriter(MinorType.VARCHAR).write(holder); + } + + public void writeVarChar(int start, int end, ArrowBuf buffer) { + getWriter(MinorType.VARCHAR).writeVarChar(start, end, buffer); + } + + @Override + public void writeVarChar(Text value) { + getWriter(MinorType.VARCHAR).writeVarChar(value); + } + + @Override + public void writeVarChar(String value) { + getWriter(MinorType.VARCHAR).writeVarChar(value); + } + + @Override + public void write(LargeVarCharHolder holder) { + getWriter(MinorType.LARGEVARCHAR).write(holder); + } + + public void writeLargeVarChar(long start, long end, ArrowBuf buffer) { + getWriter(MinorType.LARGEVARCHAR).writeLargeVarChar(start, end, buffer); + } + + @Override + public void writeLargeVarChar(Text value) { + getWriter(MinorType.LARGEVARCHAR).writeLargeVarChar(value); + } + + @Override + public void writeLargeVarChar(String value) { + 
getWriter(MinorType.LARGEVARCHAR).writeLargeVarChar(value); + } + + @Override + public void write(LargeVarBinaryHolder holder) { + getWriter(MinorType.LARGEVARBINARY).write(holder); + } + + public void writeLargeVarBinary(long start, long end, ArrowBuf buffer) { + getWriter(MinorType.LARGEVARBINARY).writeLargeVarBinary(start, end, buffer); + } + + @Override + public void writeLargeVarBinary(byte[] value) { + getWriter(MinorType.LARGEVARBINARY).writeLargeVarBinary(value); + } + + @Override + public void writeLargeVarBinary(byte[] value, int offset, int length) { + getWriter(MinorType.LARGEVARBINARY).writeLargeVarBinary(value, offset, length); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value) { + getWriter(MinorType.LARGEVARBINARY).writeLargeVarBinary(value); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value, int offset, int length) { + getWriter(MinorType.LARGEVARBINARY).writeLargeVarBinary(value, offset, length); + } + + @Override + public void write(BitHolder holder) { + getWriter(MinorType.BIT).write(holder); + } + + public void writeBit(int value) { + getWriter(MinorType.BIT).writeBit(value); + } + + + public void writeNull() { + } + + @Override + public StructWriter struct() { + return getWriter(MinorType.LIST).struct(); + } + + @Override + public ListWriter list() { + return getWriter(MinorType.LIST).list(); + } + + @Override + public MapWriter map() { + return getWriter(MinorType.LIST).map(); + } + + @Override + public MapWriter map(boolean keysSorted) { + return getWriter(MinorType.MAP, new ArrowType.Map(keysSorted)); + } + + @Override + public StructWriter struct(String name) { + return getWriter(MinorType.STRUCT).struct(name); + } + + @Override + public ListWriter list(String name) { + return getWriter(MinorType.STRUCT).list(name); + } + + @Override + public MapWriter map(String name) { + return getWriter(MinorType.STRUCT).map(name); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + return 
getWriter(MinorType.STRUCT).map(name, keysSorted); + } + + @Override + public TinyIntWriter tinyInt(String name) { + return getWriter(MinorType.STRUCT).tinyInt(name); + } + + @Override + public TinyIntWriter tinyInt() { + return getWriter(MinorType.LIST).tinyInt(); + } + + + @Override + public UInt1Writer uInt1(String name) { + return getWriter(MinorType.STRUCT).uInt1(name); + } + + @Override + public UInt1Writer uInt1() { + return getWriter(MinorType.LIST).uInt1(); + } + + + @Override + public UInt2Writer uInt2(String name) { + return getWriter(MinorType.STRUCT).uInt2(name); + } + + @Override + public UInt2Writer uInt2() { + return getWriter(MinorType.LIST).uInt2(); + } + + + @Override + public SmallIntWriter smallInt(String name) { + return getWriter(MinorType.STRUCT).smallInt(name); + } + + @Override + public SmallIntWriter smallInt() { + return getWriter(MinorType.LIST).smallInt(); + } + + + @Override + public Float2Writer float2(String name) { + return getWriter(MinorType.STRUCT).float2(name); + } + + @Override + public Float2Writer float2() { + return getWriter(MinorType.LIST).float2(); + } + + + @Override + public IntWriter integer(String name) { + return getWriter(MinorType.STRUCT).integer(name); + } + + @Override + public IntWriter integer() { + return getWriter(MinorType.LIST).integer(); + } + + + @Override + public UInt4Writer uInt4(String name) { + return getWriter(MinorType.STRUCT).uInt4(name); + } + + @Override + public UInt4Writer uInt4() { + return getWriter(MinorType.LIST).uInt4(); + } + + + @Override + public Float4Writer float4(String name) { + return getWriter(MinorType.STRUCT).float4(name); + } + + @Override + public Float4Writer float4() { + return getWriter(MinorType.LIST).float4(); + } + + + @Override + public DateDayWriter dateDay(String name) { + return getWriter(MinorType.STRUCT).dateDay(name); + } + + @Override + public DateDayWriter dateDay() { + return getWriter(MinorType.LIST).dateDay(); + } + + + @Override + public IntervalYearWriter 
intervalYear(String name) { + return getWriter(MinorType.STRUCT).intervalYear(name); + } + + @Override + public IntervalYearWriter intervalYear() { + return getWriter(MinorType.LIST).intervalYear(); + } + + + @Override + public TimeSecWriter timeSec(String name) { + return getWriter(MinorType.STRUCT).timeSec(name); + } + + @Override + public TimeSecWriter timeSec() { + return getWriter(MinorType.LIST).timeSec(); + } + + + @Override + public TimeMilliWriter timeMilli(String name) { + return getWriter(MinorType.STRUCT).timeMilli(name); + } + + @Override + public TimeMilliWriter timeMilli() { + return getWriter(MinorType.LIST).timeMilli(); + } + + + @Override + public BigIntWriter bigInt(String name) { + return getWriter(MinorType.STRUCT).bigInt(name); + } + + @Override + public BigIntWriter bigInt() { + return getWriter(MinorType.LIST).bigInt(); + } + + + @Override + public UInt8Writer uInt8(String name) { + return getWriter(MinorType.STRUCT).uInt8(name); + } + + @Override + public UInt8Writer uInt8() { + return getWriter(MinorType.LIST).uInt8(); + } + + + @Override + public Float8Writer float8(String name) { + return getWriter(MinorType.STRUCT).float8(name); + } + + @Override + public Float8Writer float8() { + return getWriter(MinorType.LIST).float8(); + } + + + @Override + public DateMilliWriter dateMilli(String name) { + return getWriter(MinorType.STRUCT).dateMilli(name); + } + + @Override + public DateMilliWriter dateMilli() { + return getWriter(MinorType.LIST).dateMilli(); + } + + + @Override + public DurationWriter duration(String name, org.apache.arrow.vector.types.TimeUnit unit) { + return getWriter(MinorType.STRUCT).duration(name, unit); + } + + @Override + public DurationWriter duration(String name) { + return getWriter(MinorType.STRUCT).duration(name); + } + + @Override + public DurationWriter duration() { + return getWriter(MinorType.LIST).duration(); + } + + + @Override + public TimeStampSecWriter timeStampSec(String name) { + return 
getWriter(MinorType.STRUCT).timeStampSec(name); + } + + @Override + public TimeStampSecWriter timeStampSec() { + return getWriter(MinorType.LIST).timeStampSec(); + } + + + @Override + public TimeStampMilliWriter timeStampMilli(String name) { + return getWriter(MinorType.STRUCT).timeStampMilli(name); + } + + @Override + public TimeStampMilliWriter timeStampMilli() { + return getWriter(MinorType.LIST).timeStampMilli(); + } + + + @Override + public TimeStampMicroWriter timeStampMicro(String name) { + return getWriter(MinorType.STRUCT).timeStampMicro(name); + } + + @Override + public TimeStampMicroWriter timeStampMicro() { + return getWriter(MinorType.LIST).timeStampMicro(); + } + + + @Override + public TimeStampNanoWriter timeStampNano(String name) { + return getWriter(MinorType.STRUCT).timeStampNano(name); + } + + @Override + public TimeStampNanoWriter timeStampNano() { + return getWriter(MinorType.LIST).timeStampNano(); + } + + + @Override + public TimeStampSecTZWriter timeStampSecTZ(String name, String timezone) { + return getWriter(MinorType.STRUCT).timeStampSecTZ(name, timezone); + } + + @Override + public TimeStampSecTZWriter timeStampSecTZ(String name) { + return getWriter(MinorType.STRUCT).timeStampSecTZ(name); + } + + @Override + public TimeStampSecTZWriter timeStampSecTZ() { + return getWriter(MinorType.LIST).timeStampSecTZ(); + } + + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name, String timezone) { + return getWriter(MinorType.STRUCT).timeStampMilliTZ(name, timezone); + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name) { + return getWriter(MinorType.STRUCT).timeStampMilliTZ(name); + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ() { + return getWriter(MinorType.LIST).timeStampMilliTZ(); + } + + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ(String name, String timezone) { + return getWriter(MinorType.STRUCT).timeStampMicroTZ(name, timezone); + } + + @Override + public 
TimeStampMicroTZWriter timeStampMicroTZ(String name) { + return getWriter(MinorType.STRUCT).timeStampMicroTZ(name); + } + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ() { + return getWriter(MinorType.LIST).timeStampMicroTZ(); + } + + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name, String timezone) { + return getWriter(MinorType.STRUCT).timeStampNanoTZ(name, timezone); + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name) { + return getWriter(MinorType.STRUCT).timeStampNanoTZ(name); + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ() { + return getWriter(MinorType.LIST).timeStampNanoTZ(); + } + + + @Override + public TimeMicroWriter timeMicro(String name) { + return getWriter(MinorType.STRUCT).timeMicro(name); + } + + @Override + public TimeMicroWriter timeMicro() { + return getWriter(MinorType.LIST).timeMicro(); + } + + + @Override + public TimeNanoWriter timeNano(String name) { + return getWriter(MinorType.STRUCT).timeNano(name); + } + + @Override + public TimeNanoWriter timeNano() { + return getWriter(MinorType.LIST).timeNano(); + } + + + @Override + public IntervalDayWriter intervalDay(String name) { + return getWriter(MinorType.STRUCT).intervalDay(name); + } + + @Override + public IntervalDayWriter intervalDay() { + return getWriter(MinorType.LIST).intervalDay(); + } + + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano(String name) { + return getWriter(MinorType.STRUCT).intervalMonthDayNano(name); + } + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano() { + return getWriter(MinorType.LIST).intervalMonthDayNano(); + } + + + @Override + public Decimal256Writer decimal256(String name, int scale, int precision) { + return getWriter(MinorType.STRUCT).decimal256(name, scale, precision); + } + + @Override + public Decimal256Writer decimal256(String name) { + return getWriter(MinorType.STRUCT).decimal256(name); + } + + @Override + public 
Decimal256Writer decimal256() { + return getWriter(MinorType.LIST).decimal256(); + } + + + @Override + public DecimalWriter decimal(String name, int scale, int precision) { + return getWriter(MinorType.STRUCT).decimal(name, scale, precision); + } + + @Override + public DecimalWriter decimal(String name) { + return getWriter(MinorType.STRUCT).decimal(name); + } + + @Override + public DecimalWriter decimal() { + return getWriter(MinorType.LIST).decimal(); + } + + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name, int byteWidth) { + return getWriter(MinorType.STRUCT).fixedSizeBinary(name, byteWidth); + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name) { + return getWriter(MinorType.STRUCT).fixedSizeBinary(name); + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary() { + return getWriter(MinorType.LIST).fixedSizeBinary(); + } + + + @Override + public VarBinaryWriter varBinary(String name) { + return getWriter(MinorType.STRUCT).varBinary(name); + } + + @Override + public VarBinaryWriter varBinary() { + return getWriter(MinorType.LIST).varBinary(); + } + + + @Override + public VarCharWriter varChar(String name) { + return getWriter(MinorType.STRUCT).varChar(name); + } + + @Override + public VarCharWriter varChar() { + return getWriter(MinorType.LIST).varChar(); + } + + + @Override + public LargeVarCharWriter largeVarChar(String name) { + return getWriter(MinorType.STRUCT).largeVarChar(name); + } + + @Override + public LargeVarCharWriter largeVarChar() { + return getWriter(MinorType.LIST).largeVarChar(); + } + + + @Override + public LargeVarBinaryWriter largeVarBinary(String name) { + return getWriter(MinorType.STRUCT).largeVarBinary(name); + } + + @Override + public LargeVarBinaryWriter largeVarBinary() { + return getWriter(MinorType.LIST).largeVarBinary(); + } + + + @Override + public BitWriter bit(String name) { + return getWriter(MinorType.STRUCT).bit(name); + } + + @Override + public BitWriter bit() { + 
return getWriter(MinorType.LIST).bit(); + } + + + public void copyReader(FieldReader reader) { + getWriter().copyReader(reader); + } + + public void copyReaderToField(String name, FieldReader reader) { + getWriter().copyReaderToField(name, reader); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BigIntHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BigIntHolderReaderImpl.java new file mode 100644 index 000000000000..5461ca8e73af --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BigIntHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class BigIntHolderReaderImpl extends AbstractFieldReader { + + private BigIntHolder holder; + public BigIntHolderReaderImpl(BigIntHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new 
UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.BIGINT; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(BigIntHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableBigIntHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + + public void copyAsValue(BigIntWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BigIntReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BigIntReaderImpl.java new file mode 100644 index 000000000000..a28e2e0e2b44 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BigIntReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class BigIntReaderImpl extends AbstractFieldReader { + + private final BigIntVector vector; + + public BigIntReaderImpl(BigIntVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } 
+ + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(BigIntWriter writer){ + BigIntWriterImpl impl = (BigIntWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + BigIntWriterImpl impl = (BigIntWriterImpl) writer.bigInt(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableBigIntHolder h){ + vector.get(idx(), h); + } + + public Long readLong(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BigIntWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BigIntWriterImpl.java new file mode 100644 index 000000000000..071c9f4ffde2 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BigIntWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class BigIntWriterImpl extends AbstractFieldWriter { + + final BigIntVector vector; + + +public BigIntWriterImpl(BigIntVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(BigIntHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableBigIntHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeBigInt(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BitHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BitHolderReaderImpl.java new file mode 100644 index 000000000000..67c579e6c13f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BitHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class BitHolderReaderImpl extends AbstractFieldReader { + + private BitHolder holder; + public BitHolderReaderImpl(BitHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.BIT; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(BitHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableBitHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Boolean readBoolean() { + + return new Boolean(holder.value != 0); + } + + @Override + public Object readObject() { + return readBoolean(); + } + + public void copyAsValue(BitWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BitReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BitReaderImpl.java new file mode 100644 index 000000000000..ffc5301b5ab6 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BitReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class BitReaderImpl extends AbstractFieldReader { + + private final BitVector vector; + + public BitReaderImpl(BitVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(BitWriter writer){ + BitWriterImpl impl = (BitWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + BitWriterImpl impl = (BitWriterImpl) writer.bit(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableBitHolder h){ + vector.get(idx(), h); + } + + public Boolean readBoolean(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BitWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BitWriterImpl.java new file mode 100644 index 000000000000..6b14f365cbee --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/BitWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the 
ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class BitWriterImpl extends AbstractFieldWriter { + + final BitVector vector; + + +public BitWriterImpl(BitVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(BitHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableBitHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeBit(int value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/ComplexCopier.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/ComplexCopier.java new file mode 100644 index 000000000000..3cdb091897e4 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/ComplexCopier.java @@ -0,0 +1,860 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/* + * This class is generated using freemarker and the 
ComplexCopier.java template. + */ +@SuppressWarnings("unused") +public class ComplexCopier { + + /** + * Do a deep copy of the value in input into output + * @param input field to read from + * @param output field to write to + */ + public static void copy(FieldReader input, FieldWriter output) { + writeValue(input, output); + } + + private static void writeValue(FieldReader reader, FieldWriter writer) { + final MinorType mt = reader.getMinorType(); + + switch (mt) { + + case LIST: + case LARGELIST: + case FIXED_SIZE_LIST: + if (reader.isSet()) { + writer.startList(); + while (reader.next()) { + FieldReader childReader = reader.reader(); + FieldWriter childWriter = getListWriterForReader(childReader, writer); + if (childReader.isSet()) { + writeValue(childReader, childWriter); + } else { + childWriter.writeNull(); + } + } + writer.endList(); + } else { + writer.writeNull(); + } + break; + case MAP: + if (reader.isSet()) { + UnionMapReader mapReader = (UnionMapReader) reader; + writer.startMap(); + while (mapReader.next()) { + FieldReader structReader = reader.reader(); + if (structReader.isSet()) { + writer.startEntry(); + writeValue(mapReader.key(), getMapWriterForReader(mapReader.key(), writer.key())); + writeValue(mapReader.value(), getMapWriterForReader(mapReader.value(), writer.value())); + writer.endEntry(); + } else { + writer.writeNull(); + } + } + writer.endMap(); + } else { + writer.writeNull(); + } + break; + case STRUCT: + if (reader.isSet()) { + writer.start(); + for(String name : reader){ + FieldReader childReader = reader.reader(name); + if (childReader.getMinorType() != Types.MinorType.NULL) { + FieldWriter childWriter = getStructWriterForReader(childReader, writer, name); + if (childReader.isSet()) { + writeValue(childReader, childWriter); + } else { + childWriter.writeNull(); + } + } + } + writer.end(); + } else { + writer.writeNull(); + } + break; + + + case TINYINT: + if (reader.isSet()) { + NullableTinyIntHolder tinyIntHolder = new 
NullableTinyIntHolder(); + reader.read(tinyIntHolder); + if (tinyIntHolder.isSet == 1) { + writer.writeTinyInt(tinyIntHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case UINT1: + if (reader.isSet()) { + NullableUInt1Holder uInt1Holder = new NullableUInt1Holder(); + reader.read(uInt1Holder); + if (uInt1Holder.isSet == 1) { + writer.writeUInt1(uInt1Holder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case UINT2: + if (reader.isSet()) { + NullableUInt2Holder uInt2Holder = new NullableUInt2Holder(); + reader.read(uInt2Holder); + if (uInt2Holder.isSet == 1) { + writer.writeUInt2(uInt2Holder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case SMALLINT: + if (reader.isSet()) { + NullableSmallIntHolder smallIntHolder = new NullableSmallIntHolder(); + reader.read(smallIntHolder); + if (smallIntHolder.isSet == 1) { + writer.writeSmallInt(smallIntHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case FLOAT2: + if (reader.isSet()) { + NullableFloat2Holder float2Holder = new NullableFloat2Holder(); + reader.read(float2Holder); + if (float2Holder.isSet == 1) { + writer.writeFloat2(float2Holder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case INT: + if (reader.isSet()) { + NullableIntHolder intHolder = new NullableIntHolder(); + reader.read(intHolder); + if (intHolder.isSet == 1) { + writer.writeInt(intHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case UINT4: + if (reader.isSet()) { + NullableUInt4Holder uInt4Holder = new NullableUInt4Holder(); + reader.read(uInt4Holder); + if (uInt4Holder.isSet == 1) { + writer.writeUInt4(uInt4Holder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case FLOAT4: + if (reader.isSet()) { + NullableFloat4Holder float4Holder = new NullableFloat4Holder(); + reader.read(float4Holder); + if (float4Holder.isSet == 1) { + writer.writeFloat4(float4Holder.value); + } + } else { + writer.writeNull(); + } + 
break; + + + + case DATEDAY: + if (reader.isSet()) { + NullableDateDayHolder dateDayHolder = new NullableDateDayHolder(); + reader.read(dateDayHolder); + if (dateDayHolder.isSet == 1) { + writer.writeDateDay(dateDayHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case INTERVALYEAR: + if (reader.isSet()) { + NullableIntervalYearHolder intervalYearHolder = new NullableIntervalYearHolder(); + reader.read(intervalYearHolder); + if (intervalYearHolder.isSet == 1) { + writer.writeIntervalYear(intervalYearHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case TIMESEC: + if (reader.isSet()) { + NullableTimeSecHolder timeSecHolder = new NullableTimeSecHolder(); + reader.read(timeSecHolder); + if (timeSecHolder.isSet == 1) { + writer.writeTimeSec(timeSecHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case TIMEMILLI: + if (reader.isSet()) { + NullableTimeMilliHolder timeMilliHolder = new NullableTimeMilliHolder(); + reader.read(timeMilliHolder); + if (timeMilliHolder.isSet == 1) { + writer.writeTimeMilli(timeMilliHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case BIGINT: + if (reader.isSet()) { + NullableBigIntHolder bigIntHolder = new NullableBigIntHolder(); + reader.read(bigIntHolder); + if (bigIntHolder.isSet == 1) { + writer.writeBigInt(bigIntHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case UINT8: + if (reader.isSet()) { + NullableUInt8Holder uInt8Holder = new NullableUInt8Holder(); + reader.read(uInt8Holder); + if (uInt8Holder.isSet == 1) { + writer.writeUInt8(uInt8Holder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case FLOAT8: + if (reader.isSet()) { + NullableFloat8Holder float8Holder = new NullableFloat8Holder(); + reader.read(float8Holder); + if (float8Holder.isSet == 1) { + writer.writeFloat8(float8Holder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case DATEMILLI: + if (reader.isSet()) { + 
NullableDateMilliHolder dateMilliHolder = new NullableDateMilliHolder(); + reader.read(dateMilliHolder); + if (dateMilliHolder.isSet == 1) { + writer.writeDateMilli(dateMilliHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + + case TIMESTAMPSEC: + if (reader.isSet()) { + NullableTimeStampSecHolder timeStampSecHolder = new NullableTimeStampSecHolder(); + reader.read(timeStampSecHolder); + if (timeStampSecHolder.isSet == 1) { + writer.writeTimeStampSec(timeStampSecHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case TIMESTAMPMILLI: + if (reader.isSet()) { + NullableTimeStampMilliHolder timeStampMilliHolder = new NullableTimeStampMilliHolder(); + reader.read(timeStampMilliHolder); + if (timeStampMilliHolder.isSet == 1) { + writer.writeTimeStampMilli(timeStampMilliHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case TIMESTAMPMICRO: + if (reader.isSet()) { + NullableTimeStampMicroHolder timeStampMicroHolder = new NullableTimeStampMicroHolder(); + reader.read(timeStampMicroHolder); + if (timeStampMicroHolder.isSet == 1) { + writer.writeTimeStampMicro(timeStampMicroHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case TIMESTAMPNANO: + if (reader.isSet()) { + NullableTimeStampNanoHolder timeStampNanoHolder = new NullableTimeStampNanoHolder(); + reader.read(timeStampNanoHolder); + if (timeStampNanoHolder.isSet == 1) { + writer.writeTimeStampNano(timeStampNanoHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + + + + + case TIMEMICRO: + if (reader.isSet()) { + NullableTimeMicroHolder timeMicroHolder = new NullableTimeMicroHolder(); + reader.read(timeMicroHolder); + if (timeMicroHolder.isSet == 1) { + writer.writeTimeMicro(timeMicroHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case TIMENANO: + if (reader.isSet()) { + NullableTimeNanoHolder timeNanoHolder = new NullableTimeNanoHolder(); + reader.read(timeNanoHolder); + if 
(timeNanoHolder.isSet == 1) { + writer.writeTimeNano(timeNanoHolder.value); + } + } else { + writer.writeNull(); + } + break; + + + + case INTERVALDAY: + if (reader.isSet()) { + NullableIntervalDayHolder intervalDayHolder = new NullableIntervalDayHolder(); + reader.read(intervalDayHolder); + if (intervalDayHolder.isSet == 1) { + writer.writeIntervalDay(intervalDayHolder.days, intervalDayHolder.milliseconds); + } + } else { + writer.writeNull(); + } + break; + + + + case INTERVALMONTHDAYNANO: + if (reader.isSet()) { + NullableIntervalMonthDayNanoHolder intervalMonthDayNanoHolder = new NullableIntervalMonthDayNanoHolder(); + reader.read(intervalMonthDayNanoHolder); + if (intervalMonthDayNanoHolder.isSet == 1) { + writer.writeIntervalMonthDayNano(intervalMonthDayNanoHolder.months, intervalMonthDayNanoHolder.days, intervalMonthDayNanoHolder.nanoseconds); + } + } else { + writer.writeNull(); + } + break; + + + + case DECIMAL256: + if (reader.isSet()) { + NullableDecimal256Holder decimal256Holder = new NullableDecimal256Holder(); + reader.read(decimal256Holder); + if (decimal256Holder.isSet == 1) { + writer.writeDecimal256(decimal256Holder.start, decimal256Holder.buffer, new ArrowType.Decimal(decimal256Holder.precision, decimal256Holder.scale, Decimal256Holder.WIDTH * 8)); + } + } else { + writer.writeNull(); + } + break; + + + + case DECIMAL: + if (reader.isSet()) { + NullableDecimalHolder decimalHolder = new NullableDecimalHolder(); + reader.read(decimalHolder); + if (decimalHolder.isSet == 1) { + writer.writeDecimal(decimalHolder.start, decimalHolder.buffer, new ArrowType.Decimal(decimalHolder.precision, decimalHolder.scale, DecimalHolder.WIDTH * 8)); + } + } else { + writer.writeNull(); + } + break; + + + + + case VARBINARY: + if (reader.isSet()) { + NullableVarBinaryHolder varBinaryHolder = new NullableVarBinaryHolder(); + reader.read(varBinaryHolder); + if (varBinaryHolder.isSet == 1) { + writer.writeVarBinary(varBinaryHolder.start, varBinaryHolder.end, 
varBinaryHolder.buffer); + } + } else { + writer.writeNull(); + } + break; + + + + case VARCHAR: + if (reader.isSet()) { + NullableVarCharHolder varCharHolder = new NullableVarCharHolder(); + reader.read(varCharHolder); + if (varCharHolder.isSet == 1) { + writer.writeVarChar(varCharHolder.start, varCharHolder.end, varCharHolder.buffer); + } + } else { + writer.writeNull(); + } + break; + + + + case LARGEVARCHAR: + if (reader.isSet()) { + NullableLargeVarCharHolder largeVarCharHolder = new NullableLargeVarCharHolder(); + reader.read(largeVarCharHolder); + if (largeVarCharHolder.isSet == 1) { + writer.writeLargeVarChar(largeVarCharHolder.start, largeVarCharHolder.end, largeVarCharHolder.buffer); + } + } else { + writer.writeNull(); + } + break; + + + + case LARGEVARBINARY: + if (reader.isSet()) { + NullableLargeVarBinaryHolder largeVarBinaryHolder = new NullableLargeVarBinaryHolder(); + reader.read(largeVarBinaryHolder); + if (largeVarBinaryHolder.isSet == 1) { + writer.writeLargeVarBinary(largeVarBinaryHolder.start, largeVarBinaryHolder.end, largeVarBinaryHolder.buffer); + } + } else { + writer.writeNull(); + } + break; + + + + case BIT: + if (reader.isSet()) { + NullableBitHolder bitHolder = new NullableBitHolder(); + reader.read(bitHolder); + if (bitHolder.isSet == 1) { + writer.writeBit(bitHolder.value); + } + } else { + writer.writeNull(); + } + break; + + } + } + + private static FieldWriter getStructWriterForReader(FieldReader reader, StructWriter writer, String name) { + switch (reader.getMinorType()) { + case TINYINT: + return (FieldWriter) writer.tinyInt(name); + + case UINT1: + return (FieldWriter) writer.uInt1(name); + + case UINT2: + return (FieldWriter) writer.uInt2(name); + + case SMALLINT: + return (FieldWriter) writer.smallInt(name); + + case FLOAT2: + return (FieldWriter) writer.float2(name); + + case INT: + return (FieldWriter) writer.integer(name); + + case UINT4: + return (FieldWriter) writer.uInt4(name); + + case FLOAT4: + return (FieldWriter) 
writer.float4(name); + + case DATEDAY: + return (FieldWriter) writer.dateDay(name); + + case INTERVALYEAR: + return (FieldWriter) writer.intervalYear(name); + + case TIMESEC: + return (FieldWriter) writer.timeSec(name); + + case TIMEMILLI: + return (FieldWriter) writer.timeMilli(name); + + case BIGINT: + return (FieldWriter) writer.bigInt(name); + + case UINT8: + return (FieldWriter) writer.uInt8(name); + + case FLOAT8: + return (FieldWriter) writer.float8(name); + + case DATEMILLI: + return (FieldWriter) writer.dateMilli(name); + + + case TIMESTAMPSEC: + return (FieldWriter) writer.timeStampSec(name); + + case TIMESTAMPMILLI: + return (FieldWriter) writer.timeStampMilli(name); + + case TIMESTAMPMICRO: + return (FieldWriter) writer.timeStampMicro(name); + + case TIMESTAMPNANO: + return (FieldWriter) writer.timeStampNano(name); + + + + + + case TIMEMICRO: + return (FieldWriter) writer.timeMicro(name); + + case TIMENANO: + return (FieldWriter) writer.timeNano(name); + + case INTERVALDAY: + return (FieldWriter) writer.intervalDay(name); + + case INTERVALMONTHDAYNANO: + return (FieldWriter) writer.intervalMonthDayNano(name); + + case DECIMAL256: + if (reader.getField().getType() instanceof ArrowType.Decimal) { + ArrowType.Decimal type = (ArrowType.Decimal) reader.getField().getType(); + return (FieldWriter) writer.decimal256(name, type.getScale(), type.getPrecision()); + } else { + return (FieldWriter) writer.decimal256(name); + } + + case DECIMAL: + if (reader.getField().getType() instanceof ArrowType.Decimal) { + ArrowType.Decimal type = (ArrowType.Decimal) reader.getField().getType(); + return (FieldWriter) writer.decimal(name, type.getScale(), type.getPrecision()); + } else { + return (FieldWriter) writer.decimal(name); + } + + + case VARBINARY: + return (FieldWriter) writer.varBinary(name); + + case VARCHAR: + return (FieldWriter) writer.varChar(name); + + case LARGEVARCHAR: + return (FieldWriter) writer.largeVarChar(name); + + case LARGEVARBINARY: + return 
(FieldWriter) writer.largeVarBinary(name); + + case BIT: + return (FieldWriter) writer.bit(name); + + case STRUCT: + return (FieldWriter) writer.struct(name); + case FIXED_SIZE_LIST: + case LIST: + return (FieldWriter) writer.list(name); + case MAP: + return (FieldWriter) writer.map(name); + default: + throw new UnsupportedOperationException(reader.getMinorType().toString()); + } + } + + private static FieldWriter getListWriterForReader(FieldReader reader, ListWriter writer) { + switch (reader.getMinorType()) { + case TINYINT: + return (FieldWriter) writer.tinyInt(); + case UINT1: + return (FieldWriter) writer.uInt1(); + case UINT2: + return (FieldWriter) writer.uInt2(); + case SMALLINT: + return (FieldWriter) writer.smallInt(); + case FLOAT2: + return (FieldWriter) writer.float2(); + case INT: + return (FieldWriter) writer.integer(); + case UINT4: + return (FieldWriter) writer.uInt4(); + case FLOAT4: + return (FieldWriter) writer.float4(); + case DATEDAY: + return (FieldWriter) writer.dateDay(); + case INTERVALYEAR: + return (FieldWriter) writer.intervalYear(); + case TIMESEC: + return (FieldWriter) writer.timeSec(); + case TIMEMILLI: + return (FieldWriter) writer.timeMilli(); + case BIGINT: + return (FieldWriter) writer.bigInt(); + case UINT8: + return (FieldWriter) writer.uInt8(); + case FLOAT8: + return (FieldWriter) writer.float8(); + case DATEMILLI: + return (FieldWriter) writer.dateMilli(); + case TIMESTAMPSEC: + return (FieldWriter) writer.timeStampSec(); + case TIMESTAMPMILLI: + return (FieldWriter) writer.timeStampMilli(); + case TIMESTAMPMICRO: + return (FieldWriter) writer.timeStampMicro(); + case TIMESTAMPNANO: + return (FieldWriter) writer.timeStampNano(); + case TIMEMICRO: + return (FieldWriter) writer.timeMicro(); + case TIMENANO: + return (FieldWriter) writer.timeNano(); + case INTERVALDAY: + return (FieldWriter) writer.intervalDay(); + case INTERVALMONTHDAYNANO: + return (FieldWriter) writer.intervalMonthDayNano(); + case DECIMAL256: + return 
(FieldWriter) writer.decimal256(); + case DECIMAL: + return (FieldWriter) writer.decimal(); + case VARBINARY: + return (FieldWriter) writer.varBinary(); + case VARCHAR: + return (FieldWriter) writer.varChar(); + case LARGEVARCHAR: + return (FieldWriter) writer.largeVarChar(); + case LARGEVARBINARY: + return (FieldWriter) writer.largeVarBinary(); + case BIT: + return (FieldWriter) writer.bit(); + case STRUCT: + return (FieldWriter) writer.struct(); + case FIXED_SIZE_LIST: + case LIST: + case MAP: + case NULL: + return (FieldWriter) writer.list(); + default: + throw new UnsupportedOperationException(reader.getMinorType().toString()); + } + } + + private static FieldWriter getMapWriterForReader(FieldReader reader, MapWriter writer) { + switch (reader.getMinorType()) { + case TINYINT: + return (FieldWriter) writer.tinyInt(); + case UINT1: + return (FieldWriter) writer.uInt1(); + case UINT2: + return (FieldWriter) writer.uInt2(); + case SMALLINT: + return (FieldWriter) writer.smallInt(); + case FLOAT2: + return (FieldWriter) writer.float2(); + case INT: + return (FieldWriter) writer.integer(); + case UINT4: + return (FieldWriter) writer.uInt4(); + case FLOAT4: + return (FieldWriter) writer.float4(); + case DATEDAY: + return (FieldWriter) writer.dateDay(); + case INTERVALYEAR: + return (FieldWriter) writer.intervalYear(); + case TIMESEC: + return (FieldWriter) writer.timeSec(); + case TIMEMILLI: + return (FieldWriter) writer.timeMilli(); + case BIGINT: + return (FieldWriter) writer.bigInt(); + case UINT8: + return (FieldWriter) writer.uInt8(); + case FLOAT8: + return (FieldWriter) writer.float8(); + case DATEMILLI: + return (FieldWriter) writer.dateMilli(); + case TIMESTAMPSEC: + return (FieldWriter) writer.timeStampSec(); + case TIMESTAMPMILLI: + return (FieldWriter) writer.timeStampMilli(); + case TIMESTAMPMICRO: + return (FieldWriter) writer.timeStampMicro(); + case TIMESTAMPNANO: + return (FieldWriter) writer.timeStampNano(); + case TIMEMICRO: + return (FieldWriter) 
writer.timeMicro(); + case TIMENANO: + return (FieldWriter) writer.timeNano(); + case INTERVALDAY: + return (FieldWriter) writer.intervalDay(); + case INTERVALMONTHDAYNANO: + return (FieldWriter) writer.intervalMonthDayNano(); + case DECIMAL256: + return (FieldWriter) writer.decimal256(); + case DECIMAL: + return (FieldWriter) writer.decimal(); + case VARBINARY: + return (FieldWriter) writer.varBinary(); + case VARCHAR: + return (FieldWriter) writer.varChar(); + case LARGEVARCHAR: + return (FieldWriter) writer.largeVarChar(); + case LARGEVARBINARY: + return (FieldWriter) writer.largeVarBinary(); + case BIT: + return (FieldWriter) writer.bit(); + case STRUCT: + return (FieldWriter) writer.struct(); + case FIXED_SIZE_LIST: + case LIST: + case NULL: + return (FieldWriter) writer.list(); + case MAP: + return (FieldWriter) writer.map(false); + default: + throw new UnsupportedOperationException(reader.getMinorType().toString()); + } + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateDayHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateDayHolderReaderImpl.java new file mode 100644 index 000000000000..28122ab6a37d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateDayHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class DateDayHolderReaderImpl extends AbstractFieldReader { + + private DateDayHolder holder; + public DateDayHolderReaderImpl(DateDayHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.DATEDAY; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(DateDayHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableDateDayHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Integer readInteger() { + + Integer value = new Integer(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readInteger(); + } + + public void copyAsValue(DateDayWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateDayReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateDayReaderImpl.java new file mode 100644 index 000000000000..6d313124b2f3 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateDayReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class DateDayReaderImpl extends AbstractFieldReader { + + private final DateDayVector vector; + + public DateDayReaderImpl(DateDayVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(DateDayWriter writer){ + DateDayWriterImpl impl = (DateDayWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + DateDayWriterImpl impl = (DateDayWriterImpl) writer.dateDay(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableDateDayHolder h){ + vector.get(idx(), h); + } + + public Integer readInteger(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateDayWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateDayWriterImpl.java new file mode 100644 index 000000000000..3de74e982f90 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateDayWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class DateDayWriterImpl extends AbstractFieldWriter { + + final DateDayVector vector; + + +public DateDayWriterImpl(DateDayVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(DateDayHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableDateDayHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeDateDay(int value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateMilliHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateMilliHolderReaderImpl.java new file mode 100644 index 000000000000..a6a1b34b32c6 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateMilliHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class DateMilliHolderReaderImpl extends AbstractFieldReader { + + private DateMilliHolder holder; + public DateMilliHolderReaderImpl(DateMilliHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.DATEMILLI; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(DateMilliHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableDateMilliHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + + return DateUtility.getLocalDateTimeFromEpochMilli(holder.value); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + + public void copyAsValue(DateMilliWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateMilliReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateMilliReaderImpl.java new file mode 100644 index 000000000000..4904570524aa --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateMilliReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class DateMilliReaderImpl extends AbstractFieldReader { + + private final DateMilliVector vector; + + public DateMilliReaderImpl(DateMilliVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + 
return !vector.isNull(idx()); + } + + public void copyAsValue(DateMilliWriter writer){ + DateMilliWriterImpl impl = (DateMilliWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + DateMilliWriterImpl impl = (DateMilliWriterImpl) writer.dateMilli(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableDateMilliHolder h){ + vector.get(idx(), h); + } + + public LocalDateTime readLocalDateTime(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateMilliWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateMilliWriterImpl.java new file mode 100644 index 000000000000..9e297b9eee6f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DateMilliWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class DateMilliWriterImpl extends AbstractFieldWriter { + + final DateMilliVector vector; + + +public DateMilliWriterImpl(DateMilliVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(DateMilliHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableDateMilliHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeDateMilli(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Decimal256HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Decimal256HolderReaderImpl.java new file mode 100644 index 000000000000..e46a6597c736 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Decimal256HolderReaderImpl.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class Decimal256HolderReaderImpl extends AbstractFieldReader { + + private Decimal256Holder holder; + public Decimal256HolderReaderImpl(Decimal256Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.DECIMAL256; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(Decimal256Holder h) { + h.start = holder.start; + h.buffer = holder.buffer; + h.scale = holder.scale; + h.precision = holder.precision; + } + + @Override + public void read(NullableDecimal256Holder h) { + h.start = holder.start; + h.buffer = holder.buffer; + h.scale = holder.scale; + h.precision = holder.precision; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public BigDecimal readBigDecimal() { + + byte[] bytes = new byte[32]; + holder.buffer.getBytes(holder.start, bytes, 0, 32); + BigDecimal value = new BigDecimal(new BigInteger(bytes), holder.scale); + return value; + } + + @Override + public Object readObject() { + return readBigDecimal(); + } + + public void copyAsValue(Decimal256Writer writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Decimal256ReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Decimal256ReaderImpl.java new file mode 100644 index 000000000000..76bf541f3768 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Decimal256ReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class Decimal256ReaderImpl extends AbstractFieldReader { + + private final Decimal256Vector vector; + + public Decimal256ReaderImpl(Decimal256Vector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + 
return !vector.isNull(idx()); + } + + public void copyAsValue(Decimal256Writer writer){ + Decimal256WriterImpl impl = (Decimal256WriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + Decimal256WriterImpl impl = (Decimal256WriterImpl) writer.decimal256(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableDecimal256Holder h){ + vector.get(idx(), h); + } + + public BigDecimal readBigDecimal(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Decimal256WriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Decimal256WriterImpl.java new file mode 100644 index 000000000000..555c3949bd04 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Decimal256WriterImpl.java @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class Decimal256WriterImpl extends AbstractFieldWriter { + + final Decimal256Vector vector; + + +public Decimal256WriterImpl(Decimal256Vector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + + + + public void write(Decimal256Holder h){ + DecimalUtility.checkPrecisionAndScale(h.precision, h.scale, vector.getPrecision(), vector.getScale()); + vector.setSafe(idx(), h); + vector.setValueCount(idx() + 1); + } + + public void write(NullableDecimal256Holder h){ + if (h.isSet == 1) { + DecimalUtility.checkPrecisionAndScale(h.precision, h.scale, vector.getPrecision(), vector.getScale()); + } + vector.setSafe(idx(), h); + vector.setValueCount(idx() + 1); + } + + public void writeDecimal256(long start, ArrowBuf buffer){ + vector.setSafe(idx(), 1, start, buffer); + vector.setValueCount(idx() + 1); + } + + public void writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType){ + DecimalUtility.checkPrecisionAndScale(((ArrowType.Decimal) arrowType).getPrecision(), + ((ArrowType.Decimal) arrowType).getScale(), vector.getPrecision(), vector.getScale()); + vector.setSafe(idx(), 1, start, buffer); + vector.setValueCount(idx() + 1); + } + + public void writeDecimal256(BigDecimal value){ + // vector.setSafe already does precision and scale checking + vector.setSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType){ + DecimalUtility.checkPrecisionAndScale(((ArrowType.Decimal) arrowType).getPrecision(), + ((ArrowType.Decimal) arrowType).getScale(), 
vector.getPrecision(), vector.getScale()); + vector.setBigEndianSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void writeBigEndianBytesToDecimal256(byte[] value){ + vector.setBigEndianSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DecimalHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DecimalHolderReaderImpl.java new file mode 100644 index 000000000000..8b21fdbb789d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DecimalHolderReaderImpl.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class DecimalHolderReaderImpl extends AbstractFieldReader { + + private DecimalHolder holder; + public DecimalHolderReaderImpl(DecimalHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new 
UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.DECIMAL; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(DecimalHolder h) { + h.start = holder.start; + h.buffer = holder.buffer; + h.scale = holder.scale; + h.precision = holder.precision; + } + + @Override + public void read(NullableDecimalHolder h) { + h.start = holder.start; + h.buffer = holder.buffer; + h.scale = holder.scale; + h.precision = holder.precision; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public BigDecimal readBigDecimal() { + + byte[] bytes = new byte[16]; + holder.buffer.getBytes(holder.start, bytes, 0, 16); + BigDecimal value = new BigDecimal(new BigInteger(bytes), holder.scale); + return value; + } + + @Override + public Object readObject() { + return readBigDecimal(); + } + + public void copyAsValue(DecimalWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DecimalReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DecimalReaderImpl.java new file mode 100644 index 000000000000..0ff5e8d292d0 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DecimalReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template 
ComplexReaders.java + */ +@SuppressWarnings("unused") +public class DecimalReaderImpl extends AbstractFieldReader { + + private final DecimalVector vector; + + public DecimalReaderImpl(DecimalVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(DecimalWriter writer){ + DecimalWriterImpl impl = (DecimalWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + DecimalWriterImpl impl = (DecimalWriterImpl) writer.decimal(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableDecimalHolder h){ + vector.get(idx(), h); + } + + public BigDecimal readBigDecimal(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DecimalWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DecimalWriterImpl.java new file mode 100644 index 000000000000..bf6269ea474d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DecimalWriterImpl.java @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the 
ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class DecimalWriterImpl extends AbstractFieldWriter { + + final DecimalVector vector; + + +public DecimalWriterImpl(DecimalVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + + + + public void write(DecimalHolder h){ + DecimalUtility.checkPrecisionAndScale(h.precision, h.scale, vector.getPrecision(), vector.getScale()); + vector.setSafe(idx(), h); + vector.setValueCount(idx() + 1); + } + + public void write(NullableDecimalHolder h){ + if (h.isSet == 1) { + DecimalUtility.checkPrecisionAndScale(h.precision, h.scale, vector.getPrecision(), vector.getScale()); + } + vector.setSafe(idx(), h); + vector.setValueCount(idx() + 1); + } + + public void writeDecimal(long start, ArrowBuf buffer){ + vector.setSafe(idx(), 1, start, buffer); + vector.setValueCount(idx() + 1); + } + + public void writeDecimal(long start, ArrowBuf buffer, ArrowType arrowType){ + DecimalUtility.checkPrecisionAndScale(((ArrowType.Decimal) arrowType).getPrecision(), + ((ArrowType.Decimal) arrowType).getScale(), vector.getPrecision(), vector.getScale()); + vector.setSafe(idx(), 1, start, buffer); + vector.setValueCount(idx() + 1); + } + + public void writeDecimal(BigDecimal value){ + // vector.setSafe already does precision and scale checking + vector.setSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType){ + DecimalUtility.checkPrecisionAndScale(((ArrowType.Decimal) arrowType).getPrecision(), + ((ArrowType.Decimal) arrowType).getScale(), 
vector.getPrecision(), vector.getScale()); + vector.setBigEndianSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void writeBigEndianBytesToDecimal(byte[] value){ + vector.setBigEndianSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DenseUnionReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DenseUnionReader.java new file mode 100644 index 000000000000..8979136619a5 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DenseUnionReader.java @@ -0,0 +1,953 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template DenseUnionReader.java + */ +@SuppressWarnings("unused") +public class DenseUnionReader extends AbstractFieldReader { + + private BaseReader[] readers = new BaseReader[Byte.MAX_VALUE + 1]; + public DenseUnionVector data; + + public DenseUnionReader(DenseUnionVector data) { + this.data = data; + } + + public MinorType getMinorType() { + byte typeId = data.getTypeId(idx()); + return 
data.getVectorByType(typeId).getMinorType(); + } + + public byte getTypeId() { + return data.getTypeId(idx()); + } + + @Override + public Field getField() { + return data.getField(); + } + + public boolean isSet(){ + return !data.isNull(idx()); + } + + public void read(DenseUnionHolder holder) { + holder.reader = this; + holder.isSet = this.isSet() ? 1 : 0; + holder.typeId = getTypeId(); + } + + public void read(int index, UnionHolder holder) { + byte typeId = data.getTypeId(index); + getList(typeId).read(index, holder); + } + + private FieldReader getReaderForIndex(int index) { + byte typeId = data.getTypeId(index); + MinorType minorType = data.getVectorByType(typeId).getMinorType(); + FieldReader reader = (FieldReader) readers[typeId]; + if (reader != null) { + return reader; + } + switch (minorType) { + case NULL: + reader = NullReader.INSTANCE; + break; + case STRUCT: + reader = (FieldReader) getStruct(typeId); + break; + case LIST: + reader = (FieldReader) getList(typeId); + break; + case TINYINT: + reader = (FieldReader) getTinyInt(typeId); + break; + case UINT1: + reader = (FieldReader) getUInt1(typeId); + break; + case UINT2: + reader = (FieldReader) getUInt2(typeId); + break; + case SMALLINT: + reader = (FieldReader) getSmallInt(typeId); + break; + case FLOAT2: + reader = (FieldReader) getFloat2(typeId); + break; + case INT: + reader = (FieldReader) getInt(typeId); + break; + case UINT4: + reader = (FieldReader) getUInt4(typeId); + break; + case FLOAT4: + reader = (FieldReader) getFloat4(typeId); + break; + case DATEDAY: + reader = (FieldReader) getDateDay(typeId); + break; + case INTERVALYEAR: + reader = (FieldReader) getIntervalYear(typeId); + break; + case TIMESEC: + reader = (FieldReader) getTimeSec(typeId); + break; + case TIMEMILLI: + reader = (FieldReader) getTimeMilli(typeId); + break; + case BIGINT: + reader = (FieldReader) getBigInt(typeId); + break; + case UINT8: + reader = (FieldReader) getUInt8(typeId); + break; + case FLOAT8: + reader = 
(FieldReader) getFloat8(typeId); + break; + case DATEMILLI: + reader = (FieldReader) getDateMilli(typeId); + break; + case TIMESTAMPSEC: + reader = (FieldReader) getTimeStampSec(typeId); + break; + case TIMESTAMPMILLI: + reader = (FieldReader) getTimeStampMilli(typeId); + break; + case TIMESTAMPMICRO: + reader = (FieldReader) getTimeStampMicro(typeId); + break; + case TIMESTAMPNANO: + reader = (FieldReader) getTimeStampNano(typeId); + break; + case TIMEMICRO: + reader = (FieldReader) getTimeMicro(typeId); + break; + case TIMENANO: + reader = (FieldReader) getTimeNano(typeId); + break; + case INTERVALDAY: + reader = (FieldReader) getIntervalDay(typeId); + break; + case INTERVALMONTHDAYNANO: + reader = (FieldReader) getIntervalMonthDayNano(typeId); + break; + case DECIMAL256: + reader = (FieldReader) getDecimal256(typeId); + break; + case DECIMAL: + reader = (FieldReader) getDecimal(typeId); + break; + case VARBINARY: + reader = (FieldReader) getVarBinary(typeId); + break; + case VARCHAR: + reader = (FieldReader) getVarChar(typeId); + break; + case LARGEVARCHAR: + reader = (FieldReader) getLargeVarChar(typeId); + break; + case LARGEVARBINARY: + reader = (FieldReader) getLargeVarBinary(typeId); + break; + case BIT: + reader = (FieldReader) getBit(typeId); + break; + default: + throw new UnsupportedOperationException("Unsupported type: " + MinorType.values()[typeId]); + } + return reader; + } + + private SingleStructReaderImpl structReader; + + private StructReader getStruct(byte typeId) { + StructReader structReader = (StructReader) readers[typeId]; + if (structReader == null) { + structReader = (SingleStructReaderImpl) data.getVectorByType(typeId).getReader(); + structReader.setPosition(idx()); + readers[typeId] = structReader; + } + return structReader; + } + + private UnionListReader listReader; + + private FieldReader getList(byte typeId) { + UnionListReader listReader = (UnionListReader) readers[typeId]; + if (listReader == null) { + listReader = new 
UnionListReader((ListVector) data.getVectorByType(typeId)); + listReader.setPosition(idx()); + readers[typeId] = listReader; + } + return listReader; + } + + private UnionMapReader mapReader; + + private FieldReader getMap(byte typeId) { + UnionMapReader mapReader = (UnionMapReader) readers[typeId]; + if (mapReader == null) { + mapReader = new UnionMapReader((MapVector) data.getVectorByType(typeId)); + mapReader.setPosition(idx()); + readers[typeId] = mapReader; + } + return mapReader; + } + + @Override + public java.util.Iterator iterator() { + throw new UnsupportedOperationException(); + } + + @Override + public void copyAsValue(UnionWriter writer) { + writer.data.copyFrom(idx(), writer.idx(), data); + } + + + @Override + public Object readObject() { + return getReaderForIndex(idx()).readObject(); + } + + + @Override + public BigDecimal readBigDecimal() { + return getReaderForIndex(idx()).readBigDecimal(); + } + + + @Override + public Short readShort() { + return getReaderForIndex(idx()).readShort(); + } + + + @Override + public Integer readInteger() { + return getReaderForIndex(idx()).readInteger(); + } + + + @Override + public Long readLong() { + return getReaderForIndex(idx()).readLong(); + } + + + @Override + public Boolean readBoolean() { + return getReaderForIndex(idx()).readBoolean(); + } + + + @Override + public LocalDateTime readLocalDateTime() { + return getReaderForIndex(idx()).readLocalDateTime(); + } + + + @Override + public Duration readDuration() { + return getReaderForIndex(idx()).readDuration(); + } + + + @Override + public Period readPeriod() { + return getReaderForIndex(idx()).readPeriod(); + } + + + @Override + public Double readDouble() { + return getReaderForIndex(idx()).readDouble(); + } + + + @Override + public Float readFloat() { + return getReaderForIndex(idx()).readFloat(); + } + + + @Override + public Character readCharacter() { + return getReaderForIndex(idx()).readCharacter(); + } + + + @Override + public Text readText() { + return 
getReaderForIndex(idx()).readText(); + } + + + @Override + public Byte readByte() { + return getReaderForIndex(idx()).readByte(); + } + + + @Override + public byte[] readByteArray() { + return getReaderForIndex(idx()).readByteArray(); + } + + + @Override + public PeriodDuration readPeriodDuration() { + return getReaderForIndex(idx()).readPeriodDuration(); + } + + + public int size() { + return getReaderForIndex(idx()).size(); + } + + + private TinyIntReaderImpl getTinyInt(byte typeId) { + TinyIntReaderImpl reader = (TinyIntReaderImpl) readers[typeId]; + if (reader == null) { + reader = new TinyIntReaderImpl((TinyIntVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableTinyIntHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TinyIntWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private UInt1ReaderImpl getUInt1(byte typeId) { + UInt1ReaderImpl reader = (UInt1ReaderImpl) readers[typeId]; + if (reader == null) { + reader = new UInt1ReaderImpl((UInt1Vector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableUInt1Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(UInt1Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private UInt2ReaderImpl getUInt2(byte typeId) { + UInt2ReaderImpl reader = (UInt2ReaderImpl) readers[typeId]; + if (reader == null) { + reader = new UInt2ReaderImpl((UInt2Vector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableUInt2Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(UInt2Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private SmallIntReaderImpl getSmallInt(byte typeId) { + 
SmallIntReaderImpl reader = (SmallIntReaderImpl) readers[typeId]; + if (reader == null) { + reader = new SmallIntReaderImpl((SmallIntVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableSmallIntHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(SmallIntWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private Float2ReaderImpl getFloat2(byte typeId) { + Float2ReaderImpl reader = (Float2ReaderImpl) readers[typeId]; + if (reader == null) { + reader = new Float2ReaderImpl((Float2Vector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableFloat2Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(Float2Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private IntReaderImpl getInt(byte typeId) { + IntReaderImpl reader = (IntReaderImpl) readers[typeId]; + if (reader == null) { + reader = new IntReaderImpl((IntVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableIntHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(IntWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private UInt4ReaderImpl getUInt4(byte typeId) { + UInt4ReaderImpl reader = (UInt4ReaderImpl) readers[typeId]; + if (reader == null) { + reader = new UInt4ReaderImpl((UInt4Vector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableUInt4Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(UInt4Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private Float4ReaderImpl getFloat4(byte typeId) { + Float4ReaderImpl 
reader = (Float4ReaderImpl) readers[typeId]; + if (reader == null) { + reader = new Float4ReaderImpl((Float4Vector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableFloat4Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(Float4Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private DateDayReaderImpl getDateDay(byte typeId) { + DateDayReaderImpl reader = (DateDayReaderImpl) readers[typeId]; + if (reader == null) { + reader = new DateDayReaderImpl((DateDayVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableDateDayHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(DateDayWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private IntervalYearReaderImpl getIntervalYear(byte typeId) { + IntervalYearReaderImpl reader = (IntervalYearReaderImpl) readers[typeId]; + if (reader == null) { + reader = new IntervalYearReaderImpl((IntervalYearVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableIntervalYearHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(IntervalYearWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeSecReaderImpl getTimeSec(byte typeId) { + TimeSecReaderImpl reader = (TimeSecReaderImpl) readers[typeId]; + if (reader == null) { + reader = new TimeSecReaderImpl((TimeSecVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableTimeSecHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeSecWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + 
private TimeMilliReaderImpl getTimeMilli(byte typeId) { + TimeMilliReaderImpl reader = (TimeMilliReaderImpl) readers[typeId]; + if (reader == null) { + reader = new TimeMilliReaderImpl((TimeMilliVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableTimeMilliHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeMilliWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private BigIntReaderImpl getBigInt(byte typeId) { + BigIntReaderImpl reader = (BigIntReaderImpl) readers[typeId]; + if (reader == null) { + reader = new BigIntReaderImpl((BigIntVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableBigIntHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(BigIntWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private UInt8ReaderImpl getUInt8(byte typeId) { + UInt8ReaderImpl reader = (UInt8ReaderImpl) readers[typeId]; + if (reader == null) { + reader = new UInt8ReaderImpl((UInt8Vector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableUInt8Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(UInt8Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private Float8ReaderImpl getFloat8(byte typeId) { + Float8ReaderImpl reader = (Float8ReaderImpl) readers[typeId]; + if (reader == null) { + reader = new Float8ReaderImpl((Float8Vector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableFloat8Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(Float8Writer writer){ + 
getReaderForIndex(idx()).copyAsValue(writer); + } + + private DateMilliReaderImpl getDateMilli(byte typeId) { + DateMilliReaderImpl reader = (DateMilliReaderImpl) readers[typeId]; + if (reader == null) { + reader = new DateMilliReaderImpl((DateMilliVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableDateMilliHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(DateMilliWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampSecReaderImpl getTimeStampSec(byte typeId) { + TimeStampSecReaderImpl reader = (TimeStampSecReaderImpl) readers[typeId]; + if (reader == null) { + reader = new TimeStampSecReaderImpl((TimeStampSecVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableTimeStampSecHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampSecWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampMilliReaderImpl getTimeStampMilli(byte typeId) { + TimeStampMilliReaderImpl reader = (TimeStampMilliReaderImpl) readers[typeId]; + if (reader == null) { + reader = new TimeStampMilliReaderImpl((TimeStampMilliVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableTimeStampMilliHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampMilliWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampMicroReaderImpl getTimeStampMicro(byte typeId) { + TimeStampMicroReaderImpl reader = (TimeStampMicroReaderImpl) readers[typeId]; + if (reader == null) { + reader = new TimeStampMicroReaderImpl((TimeStampMicroVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + 
readers[typeId] = reader; + } + return reader; + } + + public void read(NullableTimeStampMicroHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampMicroWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampNanoReaderImpl getTimeStampNano(byte typeId) { + TimeStampNanoReaderImpl reader = (TimeStampNanoReaderImpl) readers[typeId]; + if (reader == null) { + reader = new TimeStampNanoReaderImpl((TimeStampNanoVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableTimeStampNanoHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampNanoWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeMicroReaderImpl getTimeMicro(byte typeId) { + TimeMicroReaderImpl reader = (TimeMicroReaderImpl) readers[typeId]; + if (reader == null) { + reader = new TimeMicroReaderImpl((TimeMicroVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableTimeMicroHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeMicroWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeNanoReaderImpl getTimeNano(byte typeId) { + TimeNanoReaderImpl reader = (TimeNanoReaderImpl) readers[typeId]; + if (reader == null) { + reader = new TimeNanoReaderImpl((TimeNanoVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableTimeNanoHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeNanoWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private IntervalDayReaderImpl getIntervalDay(byte typeId) { + IntervalDayReaderImpl reader = (IntervalDayReaderImpl) readers[typeId]; 
+ if (reader == null) { + reader = new IntervalDayReaderImpl((IntervalDayVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableIntervalDayHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(IntervalDayWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private IntervalMonthDayNanoReaderImpl getIntervalMonthDayNano(byte typeId) { + IntervalMonthDayNanoReaderImpl reader = (IntervalMonthDayNanoReaderImpl) readers[typeId]; + if (reader == null) { + reader = new IntervalMonthDayNanoReaderImpl((IntervalMonthDayNanoVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableIntervalMonthDayNanoHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(IntervalMonthDayNanoWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private Decimal256ReaderImpl getDecimal256(byte typeId) { + Decimal256ReaderImpl reader = (Decimal256ReaderImpl) readers[typeId]; + if (reader == null) { + reader = new Decimal256ReaderImpl((Decimal256Vector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableDecimal256Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(Decimal256Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private DecimalReaderImpl getDecimal(byte typeId) { + DecimalReaderImpl reader = (DecimalReaderImpl) readers[typeId]; + if (reader == null) { + reader = new DecimalReaderImpl((DecimalVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableDecimalHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(DecimalWriter 
writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private VarBinaryReaderImpl getVarBinary(byte typeId) { + VarBinaryReaderImpl reader = (VarBinaryReaderImpl) readers[typeId]; + if (reader == null) { + reader = new VarBinaryReaderImpl((VarBinaryVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableVarBinaryHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(VarBinaryWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private VarCharReaderImpl getVarChar(byte typeId) { + VarCharReaderImpl reader = (VarCharReaderImpl) readers[typeId]; + if (reader == null) { + reader = new VarCharReaderImpl((VarCharVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableVarCharHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(VarCharWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private LargeVarCharReaderImpl getLargeVarChar(byte typeId) { + LargeVarCharReaderImpl reader = (LargeVarCharReaderImpl) readers[typeId]; + if (reader == null) { + reader = new LargeVarCharReaderImpl((LargeVarCharVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableLargeVarCharHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(LargeVarCharWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private LargeVarBinaryReaderImpl getLargeVarBinary(byte typeId) { + LargeVarBinaryReaderImpl reader = (LargeVarBinaryReaderImpl) readers[typeId]; + if (reader == null) { + reader = new LargeVarBinaryReaderImpl((LargeVarBinaryVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + 
public void read(NullableLargeVarBinaryHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(LargeVarBinaryWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private BitReaderImpl getBit(byte typeId) { + BitReaderImpl reader = (BitReaderImpl) readers[typeId]; + if (reader == null) { + reader = new BitReaderImpl((BitVector) data.getVectorByType(typeId)); + reader.setPosition(idx()); + readers[typeId] = reader; + } + return reader; + } + + public void read(NullableBitHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(BitWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + @Override + public void copyAsValue(ListWriter writer) { + ComplexCopier.copy(this, (FieldWriter) writer); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + byte typeId = data.getTypeId(index); + if (readers[typeId] != null) { + int offset = data.getOffset(index); + readers[typeId].setPosition(offset); + } + } + + public FieldReader reader(byte typeId, String name){ + return getStruct(typeId).reader(name); + } + + public FieldReader reader(byte typeId) { + return getList(typeId).reader(); + } + + public boolean next() { + return getReaderForIndex(idx()).next(); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DenseUnionWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DenseUnionWriter.java new file mode 100644 index 000000000000..adc4ccf2331f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DenseUnionWriter.java @@ -0,0 +1,1572 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + import org.apache.arrow.vector.complex.writer.BaseWriter; + import org.apache.arrow.vector.types.Types.MinorType; + +/* + * This class is generated using freemarker and the DenseUnionWriter.java template. + */ +@SuppressWarnings("unused") +public class DenseUnionWriter extends AbstractFieldWriter implements FieldWriter { + + DenseUnionVector data; + + private BaseWriter[] writers = new BaseWriter[Byte.MAX_VALUE + 1]; + private final NullableStructWriterFactory nullableStructWriterFactory; + + public DenseUnionWriter(DenseUnionVector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public DenseUnionWriter(DenseUnionVector vector, NullableStructWriterFactory nullableStructWriterFactory) { + data = vector; + this.nullableStructWriterFactory = nullableStructWriterFactory; + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + for (BaseWriter writer : writers) { + writer.setPosition(index); + } + } + + @Override + public void start() { + byte typeId = data.getTypeId(idx()); + getStructWriter((byte) idx()).start(); + } + + @Override + public void end() { + byte typeId = data.getTypeId(idx()); + getStructWriter(typeId).end(); + } + + @Override + public void startList() { + byte typeId = data.getTypeId(idx()); + getListWriter(typeId).startList(); + } + + @Override + public void endList() { + byte typeId = data.getTypeId(idx()); + getListWriter(typeId).endList(); + } + + private StructWriter getStructWriter(byte typeId) { + StructWriter structWriter = (StructWriter) writers[typeId]; + if (structWriter == null) { + structWriter = nullableStructWriterFactory.build((StructVector) data.getVectorByType(typeId)); + writers[typeId] = structWriter; + } + return structWriter; + } + + public StructWriter asStruct(byte typeId) { + data.setTypeId(idx(), typeId); + return 
getStructWriter(typeId); + } + + private ListWriter getListWriter(byte typeId) { + ListWriter listWriter = (ListWriter) writers[typeId]; + if (listWriter == null) { + listWriter = new UnionListWriter((ListVector) data.getVectorByType(typeId), nullableStructWriterFactory); + writers[typeId] = listWriter; + } + return listWriter; + } + + public ListWriter asList(byte typeId) { + data.setTypeId(idx(), typeId); + return getListWriter(typeId); + } + + private MapWriter getMapWriter(byte typeId) { + MapWriter mapWriter = (MapWriter) writers[typeId]; + if (mapWriter == null) { + mapWriter = new UnionMapWriter((MapVector) data.getVectorByType(typeId)); + writers[typeId] = mapWriter; + } + return mapWriter; + } + + public MapWriter asMap(byte typeId) { + data.setTypeId(idx(), typeId); + return getMapWriter(typeId); + } + + BaseWriter getWriter(byte typeId) { + MinorType minorType = data.getVectorByType(typeId).getMinorType(); + switch (minorType) { + case STRUCT: + return getStructWriter(typeId); + case LIST: + return getListWriter(typeId); + case MAP: + return getMapWriter(typeId); + case TINYINT: + return getTinyIntWriter(typeId); + case UINT1: + return getUInt1Writer(typeId); + case UINT2: + return getUInt2Writer(typeId); + case SMALLINT: + return getSmallIntWriter(typeId); + case FLOAT2: + return getFloat2Writer(typeId); + case INT: + return getIntWriter(typeId); + case UINT4: + return getUInt4Writer(typeId); + case FLOAT4: + return getFloat4Writer(typeId); + case DATEDAY: + return getDateDayWriter(typeId); + case INTERVALYEAR: + return getIntervalYearWriter(typeId); + case TIMESEC: + return getTimeSecWriter(typeId); + case TIMEMILLI: + return getTimeMilliWriter(typeId); + case BIGINT: + return getBigIntWriter(typeId); + case UINT8: + return getUInt8Writer(typeId); + case FLOAT8: + return getFloat8Writer(typeId); + case DATEMILLI: + return getDateMilliWriter(typeId); + case TIMESTAMPSEC: + return getTimeStampSecWriter(typeId); + case TIMESTAMPMILLI: + return 
getTimeStampMilliWriter(typeId); + case TIMESTAMPMICRO: + return getTimeStampMicroWriter(typeId); + case TIMESTAMPNANO: + return getTimeStampNanoWriter(typeId); + case TIMEMICRO: + return getTimeMicroWriter(typeId); + case TIMENANO: + return getTimeNanoWriter(typeId); + case INTERVALDAY: + return getIntervalDayWriter(typeId); + case INTERVALMONTHDAYNANO: + return getIntervalMonthDayNanoWriter(typeId); + case DECIMAL256: + return getDecimal256Writer(typeId); + case DECIMAL: + return getDecimalWriter(typeId); + case VARBINARY: + return getVarBinaryWriter(typeId); + case VARCHAR: + return getVarCharWriter(typeId); + case LARGEVARCHAR: + return getLargeVarCharWriter(typeId); + case LARGEVARBINARY: + return getLargeVarBinaryWriter(typeId); + case BIT: + return getBitWriter(typeId); + default: + throw new UnsupportedOperationException("Unknown type: " + minorType); + } + } + + private TinyIntWriter getTinyIntWriter(byte typeId) { + TinyIntWriter writer = (TinyIntWriter) writers[typeId]; + if (writer == null) { + writer = new TinyIntWriterImpl((TinyIntVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public TinyIntWriter asTinyInt(byte typeId) { + data.setTypeId(idx(), typeId); + return getTinyIntWriter(typeId); + } + + @Override + public void write(TinyIntHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeTinyInt(byte value, byte typeId) { + data.setTypeId(idx(), typeId); + getTinyIntWriter(typeId).setPosition(data.getOffset(idx())); + getTinyIntWriter(typeId).writeTinyInt(value); + } + + private UInt1Writer getUInt1Writer(byte typeId) { + UInt1Writer writer = (UInt1Writer) writers[typeId]; + if (writer == null) { + writer = new UInt1WriterImpl((UInt1Vector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public UInt1Writer asUInt1(byte typeId) { + data.setTypeId(idx(), typeId); + return getUInt1Writer(typeId); + } + + @Override + public void 
write(UInt1Holder holder) { + throw new UnsupportedOperationException(); + } + + public void writeUInt1(byte value, byte typeId) { + data.setTypeId(idx(), typeId); + getUInt1Writer(typeId).setPosition(data.getOffset(idx())); + getUInt1Writer(typeId).writeUInt1(value); + } + + private UInt2Writer getUInt2Writer(byte typeId) { + UInt2Writer writer = (UInt2Writer) writers[typeId]; + if (writer == null) { + writer = new UInt2WriterImpl((UInt2Vector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public UInt2Writer asUInt2(byte typeId) { + data.setTypeId(idx(), typeId); + return getUInt2Writer(typeId); + } + + @Override + public void write(UInt2Holder holder) { + throw new UnsupportedOperationException(); + } + + public void writeUInt2(char value, byte typeId) { + data.setTypeId(idx(), typeId); + getUInt2Writer(typeId).setPosition(data.getOffset(idx())); + getUInt2Writer(typeId).writeUInt2(value); + } + + private SmallIntWriter getSmallIntWriter(byte typeId) { + SmallIntWriter writer = (SmallIntWriter) writers[typeId]; + if (writer == null) { + writer = new SmallIntWriterImpl((SmallIntVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public SmallIntWriter asSmallInt(byte typeId) { + data.setTypeId(idx(), typeId); + return getSmallIntWriter(typeId); + } + + @Override + public void write(SmallIntHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeSmallInt(short value, byte typeId) { + data.setTypeId(idx(), typeId); + getSmallIntWriter(typeId).setPosition(data.getOffset(idx())); + getSmallIntWriter(typeId).writeSmallInt(value); + } + + private Float2Writer getFloat2Writer(byte typeId) { + Float2Writer writer = (Float2Writer) writers[typeId]; + if (writer == null) { + writer = new Float2WriterImpl((Float2Vector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public Float2Writer asFloat2(byte typeId) { + 
data.setTypeId(idx(), typeId); + return getFloat2Writer(typeId); + } + + @Override + public void write(Float2Holder holder) { + throw new UnsupportedOperationException(); + } + + public void writeFloat2(short value, byte typeId) { + data.setTypeId(idx(), typeId); + getFloat2Writer(typeId).setPosition(data.getOffset(idx())); + getFloat2Writer(typeId).writeFloat2(value); + } + + private IntWriter getIntWriter(byte typeId) { + IntWriter writer = (IntWriter) writers[typeId]; + if (writer == null) { + writer = new IntWriterImpl((IntVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public IntWriter asInt(byte typeId) { + data.setTypeId(idx(), typeId); + return getIntWriter(typeId); + } + + @Override + public void write(IntHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeInt(int value, byte typeId) { + data.setTypeId(idx(), typeId); + getIntWriter(typeId).setPosition(data.getOffset(idx())); + getIntWriter(typeId).writeInt(value); + } + + private UInt4Writer getUInt4Writer(byte typeId) { + UInt4Writer writer = (UInt4Writer) writers[typeId]; + if (writer == null) { + writer = new UInt4WriterImpl((UInt4Vector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public UInt4Writer asUInt4(byte typeId) { + data.setTypeId(idx(), typeId); + return getUInt4Writer(typeId); + } + + @Override + public void write(UInt4Holder holder) { + throw new UnsupportedOperationException(); + } + + public void writeUInt4(int value, byte typeId) { + data.setTypeId(idx(), typeId); + getUInt4Writer(typeId).setPosition(data.getOffset(idx())); + getUInt4Writer(typeId).writeUInt4(value); + } + + private Float4Writer getFloat4Writer(byte typeId) { + Float4Writer writer = (Float4Writer) writers[typeId]; + if (writer == null) { + writer = new Float4WriterImpl((Float4Vector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public Float4Writer 
asFloat4(byte typeId) { + data.setTypeId(idx(), typeId); + return getFloat4Writer(typeId); + } + + @Override + public void write(Float4Holder holder) { + throw new UnsupportedOperationException(); + } + + public void writeFloat4(float value, byte typeId) { + data.setTypeId(idx(), typeId); + getFloat4Writer(typeId).setPosition(data.getOffset(idx())); + getFloat4Writer(typeId).writeFloat4(value); + } + + private DateDayWriter getDateDayWriter(byte typeId) { + DateDayWriter writer = (DateDayWriter) writers[typeId]; + if (writer == null) { + writer = new DateDayWriterImpl((DateDayVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public DateDayWriter asDateDay(byte typeId) { + data.setTypeId(idx(), typeId); + return getDateDayWriter(typeId); + } + + @Override + public void write(DateDayHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeDateDay(int value, byte typeId) { + data.setTypeId(idx(), typeId); + getDateDayWriter(typeId).setPosition(data.getOffset(idx())); + getDateDayWriter(typeId).writeDateDay(value); + } + + private IntervalYearWriter getIntervalYearWriter(byte typeId) { + IntervalYearWriter writer = (IntervalYearWriter) writers[typeId]; + if (writer == null) { + writer = new IntervalYearWriterImpl((IntervalYearVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public IntervalYearWriter asIntervalYear(byte typeId) { + data.setTypeId(idx(), typeId); + return getIntervalYearWriter(typeId); + } + + @Override + public void write(IntervalYearHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeIntervalYear(int value, byte typeId) { + data.setTypeId(idx(), typeId); + getIntervalYearWriter(typeId).setPosition(data.getOffset(idx())); + getIntervalYearWriter(typeId).writeIntervalYear(value); + } + + private TimeSecWriter getTimeSecWriter(byte typeId) { + TimeSecWriter writer = (TimeSecWriter) writers[typeId]; + 
if (writer == null) { + writer = new TimeSecWriterImpl((TimeSecVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public TimeSecWriter asTimeSec(byte typeId) { + data.setTypeId(idx(), typeId); + return getTimeSecWriter(typeId); + } + + @Override + public void write(TimeSecHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeTimeSec(int value, byte typeId) { + data.setTypeId(idx(), typeId); + getTimeSecWriter(typeId).setPosition(data.getOffset(idx())); + getTimeSecWriter(typeId).writeTimeSec(value); + } + + private TimeMilliWriter getTimeMilliWriter(byte typeId) { + TimeMilliWriter writer = (TimeMilliWriter) writers[typeId]; + if (writer == null) { + writer = new TimeMilliWriterImpl((TimeMilliVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public TimeMilliWriter asTimeMilli(byte typeId) { + data.setTypeId(idx(), typeId); + return getTimeMilliWriter(typeId); + } + + @Override + public void write(TimeMilliHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeTimeMilli(int value, byte typeId) { + data.setTypeId(idx(), typeId); + getTimeMilliWriter(typeId).setPosition(data.getOffset(idx())); + getTimeMilliWriter(typeId).writeTimeMilli(value); + } + + private BigIntWriter getBigIntWriter(byte typeId) { + BigIntWriter writer = (BigIntWriter) writers[typeId]; + if (writer == null) { + writer = new BigIntWriterImpl((BigIntVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public BigIntWriter asBigInt(byte typeId) { + data.setTypeId(idx(), typeId); + return getBigIntWriter(typeId); + } + + @Override + public void write(BigIntHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeBigInt(long value, byte typeId) { + data.setTypeId(idx(), typeId); + getBigIntWriter(typeId).setPosition(data.getOffset(idx())); + 
getBigIntWriter(typeId).writeBigInt(value); + } + + private UInt8Writer getUInt8Writer(byte typeId) { + UInt8Writer writer = (UInt8Writer) writers[typeId]; + if (writer == null) { + writer = new UInt8WriterImpl((UInt8Vector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public UInt8Writer asUInt8(byte typeId) { + data.setTypeId(idx(), typeId); + return getUInt8Writer(typeId); + } + + @Override + public void write(UInt8Holder holder) { + throw new UnsupportedOperationException(); + } + + public void writeUInt8(long value, byte typeId) { + data.setTypeId(idx(), typeId); + getUInt8Writer(typeId).setPosition(data.getOffset(idx())); + getUInt8Writer(typeId).writeUInt8(value); + } + + private Float8Writer getFloat8Writer(byte typeId) { + Float8Writer writer = (Float8Writer) writers[typeId]; + if (writer == null) { + writer = new Float8WriterImpl((Float8Vector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public Float8Writer asFloat8(byte typeId) { + data.setTypeId(idx(), typeId); + return getFloat8Writer(typeId); + } + + @Override + public void write(Float8Holder holder) { + throw new UnsupportedOperationException(); + } + + public void writeFloat8(double value, byte typeId) { + data.setTypeId(idx(), typeId); + getFloat8Writer(typeId).setPosition(data.getOffset(idx())); + getFloat8Writer(typeId).writeFloat8(value); + } + + private DateMilliWriter getDateMilliWriter(byte typeId) { + DateMilliWriter writer = (DateMilliWriter) writers[typeId]; + if (writer == null) { + writer = new DateMilliWriterImpl((DateMilliVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public DateMilliWriter asDateMilli(byte typeId) { + data.setTypeId(idx(), typeId); + return getDateMilliWriter(typeId); + } + + @Override + public void write(DateMilliHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeDateMilli(long value, byte typeId) { 
+ data.setTypeId(idx(), typeId); + getDateMilliWriter(typeId).setPosition(data.getOffset(idx())); + getDateMilliWriter(typeId).writeDateMilli(value); + } + + private TimeStampSecWriter getTimeStampSecWriter(byte typeId) { + TimeStampSecWriter writer = (TimeStampSecWriter) writers[typeId]; + if (writer == null) { + writer = new TimeStampSecWriterImpl((TimeStampSecVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public TimeStampSecWriter asTimeStampSec(byte typeId) { + data.setTypeId(idx(), typeId); + return getTimeStampSecWriter(typeId); + } + + @Override + public void write(TimeStampSecHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeTimeStampSec(long value, byte typeId) { + data.setTypeId(idx(), typeId); + getTimeStampSecWriter(typeId).setPosition(data.getOffset(idx())); + getTimeStampSecWriter(typeId).writeTimeStampSec(value); + } + + private TimeStampMilliWriter getTimeStampMilliWriter(byte typeId) { + TimeStampMilliWriter writer = (TimeStampMilliWriter) writers[typeId]; + if (writer == null) { + writer = new TimeStampMilliWriterImpl((TimeStampMilliVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public TimeStampMilliWriter asTimeStampMilli(byte typeId) { + data.setTypeId(idx(), typeId); + return getTimeStampMilliWriter(typeId); + } + + @Override + public void write(TimeStampMilliHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeTimeStampMilli(long value, byte typeId) { + data.setTypeId(idx(), typeId); + getTimeStampMilliWriter(typeId).setPosition(data.getOffset(idx())); + getTimeStampMilliWriter(typeId).writeTimeStampMilli(value); + } + + private TimeStampMicroWriter getTimeStampMicroWriter(byte typeId) { + TimeStampMicroWriter writer = (TimeStampMicroWriter) writers[typeId]; + if (writer == null) { + writer = new TimeStampMicroWriterImpl((TimeStampMicroVector) data.getVectorByType(typeId)); + 
writers[typeId] = writer; + } + return writer; + } + + public TimeStampMicroWriter asTimeStampMicro(byte typeId) { + data.setTypeId(idx(), typeId); + return getTimeStampMicroWriter(typeId); + } + + @Override + public void write(TimeStampMicroHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeTimeStampMicro(long value, byte typeId) { + data.setTypeId(idx(), typeId); + getTimeStampMicroWriter(typeId).setPosition(data.getOffset(idx())); + getTimeStampMicroWriter(typeId).writeTimeStampMicro(value); + } + + private TimeStampNanoWriter getTimeStampNanoWriter(byte typeId) { + TimeStampNanoWriter writer = (TimeStampNanoWriter) writers[typeId]; + if (writer == null) { + writer = new TimeStampNanoWriterImpl((TimeStampNanoVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public TimeStampNanoWriter asTimeStampNano(byte typeId) { + data.setTypeId(idx(), typeId); + return getTimeStampNanoWriter(typeId); + } + + @Override + public void write(TimeStampNanoHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeTimeStampNano(long value, byte typeId) { + data.setTypeId(idx(), typeId); + getTimeStampNanoWriter(typeId).setPosition(data.getOffset(idx())); + getTimeStampNanoWriter(typeId).writeTimeStampNano(value); + } + + private TimeMicroWriter getTimeMicroWriter(byte typeId) { + TimeMicroWriter writer = (TimeMicroWriter) writers[typeId]; + if (writer == null) { + writer = new TimeMicroWriterImpl((TimeMicroVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public TimeMicroWriter asTimeMicro(byte typeId) { + data.setTypeId(idx(), typeId); + return getTimeMicroWriter(typeId); + } + + @Override + public void write(TimeMicroHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeTimeMicro(long value, byte typeId) { + data.setTypeId(idx(), typeId); + 
getTimeMicroWriter(typeId).setPosition(data.getOffset(idx())); + getTimeMicroWriter(typeId).writeTimeMicro(value); + } + + private TimeNanoWriter getTimeNanoWriter(byte typeId) { + TimeNanoWriter writer = (TimeNanoWriter) writers[typeId]; + if (writer == null) { + writer = new TimeNanoWriterImpl((TimeNanoVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public TimeNanoWriter asTimeNano(byte typeId) { + data.setTypeId(idx(), typeId); + return getTimeNanoWriter(typeId); + } + + @Override + public void write(TimeNanoHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeTimeNano(long value, byte typeId) { + data.setTypeId(idx(), typeId); + getTimeNanoWriter(typeId).setPosition(data.getOffset(idx())); + getTimeNanoWriter(typeId).writeTimeNano(value); + } + + private IntervalDayWriter getIntervalDayWriter(byte typeId) { + IntervalDayWriter writer = (IntervalDayWriter) writers[typeId]; + if (writer == null) { + writer = new IntervalDayWriterImpl((IntervalDayVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public IntervalDayWriter asIntervalDay(byte typeId) { + data.setTypeId(idx(), typeId); + return getIntervalDayWriter(typeId); + } + + @Override + public void write(IntervalDayHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeIntervalDay(int days, int milliseconds, byte typeId) { + data.setTypeId(idx(), typeId); + getIntervalDayWriter(typeId).setPosition(data.getOffset(idx())); + getIntervalDayWriter(typeId).writeIntervalDay(days, milliseconds); + } + + private IntervalMonthDayNanoWriter getIntervalMonthDayNanoWriter(byte typeId) { + IntervalMonthDayNanoWriter writer = (IntervalMonthDayNanoWriter) writers[typeId]; + if (writer == null) { + writer = new IntervalMonthDayNanoWriterImpl((IntervalMonthDayNanoVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public 
IntervalMonthDayNanoWriter asIntervalMonthDayNano(byte typeId) { + data.setTypeId(idx(), typeId); + return getIntervalMonthDayNanoWriter(typeId); + } + + @Override + public void write(IntervalMonthDayNanoHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeIntervalMonthDayNano(int months, int days, long nanoseconds, byte typeId) { + data.setTypeId(idx(), typeId); + getIntervalMonthDayNanoWriter(typeId).setPosition(data.getOffset(idx())); + getIntervalMonthDayNanoWriter(typeId).writeIntervalMonthDayNano(months, days, nanoseconds); + } + + private Decimal256Writer getDecimal256Writer(byte typeId) { + Decimal256Writer writer = (Decimal256Writer) writers[typeId]; + if (writer == null) { + writer = new Decimal256WriterImpl((Decimal256Vector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public Decimal256Writer asDecimal256(byte typeId) { + data.setTypeId(idx(), typeId); + return getDecimal256Writer(typeId); + } + + @Override + public void write(Decimal256Holder holder) { + throw new UnsupportedOperationException(); + } + + public void writeDecimal256(long start, ArrowBuf buffer, byte typeId, ArrowType arrowType) { + data.setTypeId(idx(), typeId); + getDecimal256Writer(typeId).setPosition(data.getOffset(idx())); + getDecimal256Writer(typeId).writeDecimal256(start, buffer, arrowType); + } + + private DecimalWriter getDecimalWriter(byte typeId) { + DecimalWriter writer = (DecimalWriter) writers[typeId]; + if (writer == null) { + writer = new DecimalWriterImpl((DecimalVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public DecimalWriter asDecimal(byte typeId) { + data.setTypeId(idx(), typeId); + return getDecimalWriter(typeId); + } + + @Override + public void write(DecimalHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeDecimal(long start, ArrowBuf buffer, byte typeId, ArrowType arrowType) { + data.setTypeId(idx(), 
typeId); + getDecimalWriter(typeId).setPosition(data.getOffset(idx())); + getDecimalWriter(typeId).writeDecimal(start, buffer, arrowType); + } + + private VarBinaryWriter getVarBinaryWriter(byte typeId) { + VarBinaryWriter writer = (VarBinaryWriter) writers[typeId]; + if (writer == null) { + writer = new VarBinaryWriterImpl((VarBinaryVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public VarBinaryWriter asVarBinary(byte typeId) { + data.setTypeId(idx(), typeId); + return getVarBinaryWriter(typeId); + } + + @Override + public void write(VarBinaryHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeVarBinary(int start, int end, ArrowBuf buffer, byte typeId) { + data.setTypeId(idx(), typeId); + getVarBinaryWriter(typeId).setPosition(data.getOffset(idx())); + getVarBinaryWriter(typeId).writeVarBinary(start, end, buffer); + } + + private VarCharWriter getVarCharWriter(byte typeId) { + VarCharWriter writer = (VarCharWriter) writers[typeId]; + if (writer == null) { + writer = new VarCharWriterImpl((VarCharVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public VarCharWriter asVarChar(byte typeId) { + data.setTypeId(idx(), typeId); + return getVarCharWriter(typeId); + } + + @Override + public void write(VarCharHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeVarChar(int start, int end, ArrowBuf buffer, byte typeId) { + data.setTypeId(idx(), typeId); + getVarCharWriter(typeId).setPosition(data.getOffset(idx())); + getVarCharWriter(typeId).writeVarChar(start, end, buffer); + } + + private LargeVarCharWriter getLargeVarCharWriter(byte typeId) { + LargeVarCharWriter writer = (LargeVarCharWriter) writers[typeId]; + if (writer == null) { + writer = new LargeVarCharWriterImpl((LargeVarCharVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public LargeVarCharWriter 
asLargeVarChar(byte typeId) { + data.setTypeId(idx(), typeId); + return getLargeVarCharWriter(typeId); + } + + @Override + public void write(LargeVarCharHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeLargeVarChar(long start, long end, ArrowBuf buffer, byte typeId) { + data.setTypeId(idx(), typeId); + getLargeVarCharWriter(typeId).setPosition(data.getOffset(idx())); + getLargeVarCharWriter(typeId).writeLargeVarChar(start, end, buffer); + } + + private LargeVarBinaryWriter getLargeVarBinaryWriter(byte typeId) { + LargeVarBinaryWriter writer = (LargeVarBinaryWriter) writers[typeId]; + if (writer == null) { + writer = new LargeVarBinaryWriterImpl((LargeVarBinaryVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public LargeVarBinaryWriter asLargeVarBinary(byte typeId) { + data.setTypeId(idx(), typeId); + return getLargeVarBinaryWriter(typeId); + } + + @Override + public void write(LargeVarBinaryHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeLargeVarBinary(long start, long end, ArrowBuf buffer, byte typeId) { + data.setTypeId(idx(), typeId); + getLargeVarBinaryWriter(typeId).setPosition(data.getOffset(idx())); + getLargeVarBinaryWriter(typeId).writeLargeVarBinary(start, end, buffer); + } + + private BitWriter getBitWriter(byte typeId) { + BitWriter writer = (BitWriter) writers[typeId]; + if (writer == null) { + writer = new BitWriterImpl((BitVector) data.getVectorByType(typeId)); + writers[typeId] = writer; + } + return writer; + } + + public BitWriter asBit(byte typeId) { + data.setTypeId(idx(), typeId); + return getBitWriter(typeId); + } + + @Override + public void write(BitHolder holder) { + throw new UnsupportedOperationException(); + } + + public void writeBit(int value, byte typeId) { + data.setTypeId(idx(), typeId); + getBitWriter(typeId).setPosition(data.getOffset(idx())); + getBitWriter(typeId).writeBit(value); + } + + public void 
writeNull() { + } + + @Override + public StructWriter struct() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).struct(); + } + + @Override + public ListWriter list() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).list(); + } + + @Override + public ListWriter list(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).list(name); + } + + @Override + public MapWriter map() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getMapWriter(typeId).map(); + } + + @Override + public MapWriter map(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).map(name); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).map(name, keysSorted); + } + + @Override + public StructWriter struct(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).struct(name); + } + + @Override + public TinyIntWriter tinyInt(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).tinyInt(name); + } + + @Override + public TinyIntWriter tinyInt() { + byte typeId = data.getTypeId(idx()); + 
data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).tinyInt(); + } + @Override + public UInt1Writer uInt1(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).uInt1(name); + } + + @Override + public UInt1Writer uInt1() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).uInt1(); + } + @Override + public UInt2Writer uInt2(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).uInt2(name); + } + + @Override + public UInt2Writer uInt2() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).uInt2(); + } + @Override + public SmallIntWriter smallInt(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).smallInt(name); + } + + @Override + public SmallIntWriter smallInt() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).smallInt(); + } + @Override + public Float2Writer float2(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).float2(name); + } + + @Override + public Float2Writer float2() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).float2(); + } + 
@Override + public IntWriter integer(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).integer(name); + } + + @Override + public IntWriter integer() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).integer(); + } + @Override + public UInt4Writer uInt4(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).uInt4(name); + } + + @Override + public UInt4Writer uInt4() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).uInt4(); + } + @Override + public Float4Writer float4(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).float4(name); + } + + @Override + public Float4Writer float4() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).float4(); + } + @Override + public DateDayWriter dateDay(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).dateDay(name); + } + + @Override + public DateDayWriter dateDay() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).dateDay(); + } + @Override + public IntervalYearWriter intervalYear(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + 
getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).intervalYear(name); + } + + @Override + public IntervalYearWriter intervalYear() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).intervalYear(); + } + @Override + public TimeSecWriter timeSec(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).timeSec(name); + } + + @Override + public TimeSecWriter timeSec() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).timeSec(); + } + @Override + public TimeMilliWriter timeMilli(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).timeMilli(name); + } + + @Override + public TimeMilliWriter timeMilli() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).timeMilli(); + } + @Override + public BigIntWriter bigInt(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).bigInt(name); + } + + @Override + public BigIntWriter bigInt() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).bigInt(); + } + @Override + public UInt8Writer uInt8(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return 
getStructWriter(typeId).uInt8(name); + } + + @Override + public UInt8Writer uInt8() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).uInt8(); + } + @Override + public Float8Writer float8(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).float8(name); + } + + @Override + public Float8Writer float8() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).float8(); + } + @Override + public DateMilliWriter dateMilli(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).dateMilli(name); + } + + @Override + public DateMilliWriter dateMilli() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).dateMilli(); + } + @Override + public TimeStampSecWriter timeStampSec(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).timeStampSec(name); + } + + @Override + public TimeStampSecWriter timeStampSec() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).timeStampSec(); + } + @Override + public TimeStampMilliWriter timeStampMilli(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).timeStampMilli(name); + } + + @Override + public 
TimeStampMilliWriter timeStampMilli() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).timeStampMilli(); + } + @Override + public TimeStampMicroWriter timeStampMicro(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).timeStampMicro(name); + } + + @Override + public TimeStampMicroWriter timeStampMicro() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).timeStampMicro(); + } + @Override + public TimeStampNanoWriter timeStampNano(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).timeStampNano(name); + } + + @Override + public TimeStampNanoWriter timeStampNano() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).timeStampNano(); + } + @Override + public TimeMicroWriter timeMicro(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).timeMicro(name); + } + + @Override + public TimeMicroWriter timeMicro() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).timeMicro(); + } + @Override + public TimeNanoWriter timeNano(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).timeNano(name); + } + + @Override + public 
TimeNanoWriter timeNano() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).timeNano(); + } + @Override + public IntervalDayWriter intervalDay(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).intervalDay(name); + } + + @Override + public IntervalDayWriter intervalDay() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).intervalDay(); + } + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).intervalMonthDayNano(name); + } + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).intervalMonthDayNano(); + } + @Override + public Decimal256Writer decimal256(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).decimal256(name); + } + + @Override + public Decimal256Writer decimal256() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).decimal256(); + } + public Decimal256Writer decimal256(String name, int scale, int precision) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return 
getStructWriter(typeId).decimal256(name, scale, precision); + } + @Override + public DecimalWriter decimal(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).decimal(name); + } + + @Override + public DecimalWriter decimal() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).decimal(); + } + public DecimalWriter decimal(String name, int scale, int precision) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).decimal(name, scale, precision); + } + @Override + public VarBinaryWriter varBinary(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).varBinary(name); + } + + @Override + public VarBinaryWriter varBinary() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).varBinary(); + } + @Override + public VarCharWriter varChar(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).varChar(name); + } + + @Override + public VarCharWriter varChar() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).varChar(); + } + @Override + public LargeVarCharWriter largeVarChar(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return 
getStructWriter(typeId).largeVarChar(name); + } + + @Override + public LargeVarCharWriter largeVarChar() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).largeVarChar(); + } + @Override + public LargeVarBinaryWriter largeVarBinary(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).largeVarBinary(name); + } + + @Override + public LargeVarBinaryWriter largeVarBinary() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).largeVarBinary(); + } + @Override + public BitWriter bit(String name) { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getStructWriter(typeId).setPosition(data.getOffset(idx())); + return getStructWriter(typeId).bit(name); + } + + @Override + public BitWriter bit() { + byte typeId = data.getTypeId(idx()); + data.setTypeId(idx(), typeId); + getListWriter(typeId).setPosition(data.getOffset(idx())); + return getListWriter(typeId).bit(); + } + + @Override + public void allocate() { + data.allocateNew(); + } + + @Override + public void clear() { + data.clear(); + } + + @Override + public void close() throws Exception { + data.close(); + } + + @Override + public Field getField() { + return data.getField(); + } + + @Override + public int getValueCapacity() { + return data.getValueCapacity(); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DurationHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DurationHolderReaderImpl.java new file mode 100644 index 000000000000..7348e02d9284 --- /dev/null +++ 
b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DurationHolderReaderImpl.java @@ -0,0 +1,128 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import 
org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class DurationHolderReaderImpl extends AbstractFieldReader { + + private DurationHolder holder; + public DurationHolderReaderImpl(DurationHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.DURATION; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(DurationHolder h) { + h.value = holder.value; + h.unit = holder.unit; + } + + @Override + public void read(NullableDurationHolder h) { + h.value = holder.value; + h.unit = holder.unit; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Duration readDuration() { + + return DurationVector.toDuration(holder.value, holder.unit); + } + + @Override + public Object readObject() { + return readDuration(); + } + + public void copyAsValue(DurationWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DurationReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DurationReaderImpl.java new file mode 100644 index 000000000000..1b4e0447007e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DurationReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class DurationReaderImpl extends AbstractFieldReader { + + private final DurationVector vector; + + public DurationReaderImpl(DurationVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return 
!vector.isNull(idx()); + } + + public void copyAsValue(DurationWriter writer){ + DurationWriterImpl impl = (DurationWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + DurationWriterImpl impl = (DurationWriterImpl) writer.duration(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableDurationHolder h){ + vector.get(idx(), h); + } + + public Duration readDuration(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DurationWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DurationWriterImpl.java new file mode 100644 index 000000000000..61682be95f02 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/DurationWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class DurationWriterImpl extends AbstractFieldWriter { + + final DurationVector vector; + + +public DurationWriterImpl(DurationVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(DurationHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableDurationHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeDuration(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/FixedSizeBinaryHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/FixedSizeBinaryHolderReaderImpl.java new file mode 100644 index 000000000000..981f3a3009b5 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/FixedSizeBinaryHolderReaderImpl.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class FixedSizeBinaryHolderReaderImpl extends AbstractFieldReader { + + private FixedSizeBinaryHolder holder; + public FixedSizeBinaryHolderReaderImpl(FixedSizeBinaryHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.FIXEDSIZEBINARY; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(FixedSizeBinaryHolder h) { + h.buffer = holder.buffer; + h.byteWidth = holder.byteWidth; + } + + @Override + public void read(NullableFixedSizeBinaryHolder h) { + h.buffer = holder.buffer; + h.byteWidth = holder.byteWidth; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public byte[] readByteArray() { + + byte[] value = new byte [holder.byteWidth]; + holder.buffer.getBytes(0, value, 0, holder.byteWidth); + return value; + } + + @Override + public Object readObject() { + return readByteArray(); + } + + public void copyAsValue(FixedSizeBinaryWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/FixedSizeBinaryReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/FixedSizeBinaryReaderImpl.java new file mode 100644 index 000000000000..038fd2cfdc71 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/FixedSizeBinaryReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class FixedSizeBinaryReaderImpl extends AbstractFieldReader { + + private final FixedSizeBinaryVector vector; + + public FixedSizeBinaryReaderImpl(FixedSizeBinaryVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + 
public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(FixedSizeBinaryWriter writer){ + FixedSizeBinaryWriterImpl impl = (FixedSizeBinaryWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + FixedSizeBinaryWriterImpl impl = (FixedSizeBinaryWriterImpl) writer.fixedSizeBinary(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableFixedSizeBinaryHolder h){ + vector.get(idx(), h); + } + + public byte[] readByteArray(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/FixedSizeBinaryWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/FixedSizeBinaryWriterImpl.java new file mode 100644 index 000000000000..9201311c8409 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/FixedSizeBinaryWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class FixedSizeBinaryWriterImpl extends AbstractFieldWriter { + + final FixedSizeBinaryVector vector; + + +public FixedSizeBinaryWriterImpl(FixedSizeBinaryVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(FixedSizeBinaryHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableFixedSizeBinaryHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeFixedSizeBinary(ArrowBuf buffer) { + vector.setSafe(idx(), 1, buffer); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float2HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float2HolderReaderImpl.java new file mode 100644 index 000000000000..729483f06e5a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float2HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class Float2HolderReaderImpl extends AbstractFieldReader { + + private Float2Holder holder; + public Float2HolderReaderImpl(Float2Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.FLOAT2; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(Float2Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableFloat2Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Short readShort() { + + Short value = new Short(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readShort(); + } + + public void copyAsValue(Float2Writer writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float2ReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float2ReaderImpl.java new file mode 100644 index 000000000000..a56e6b647239 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float2ReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class Float2ReaderImpl extends AbstractFieldReader { + + private final Float2Vector vector; + + public Float2ReaderImpl(Float2Vector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(Float2Writer writer){ + Float2WriterImpl impl = (Float2WriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + Float2WriterImpl impl = (Float2WriterImpl) writer.float2(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableFloat2Holder h){ + vector.get(idx(), h); + } + + public Short readShort(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float2WriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float2WriterImpl.java new file mode 100644 index 000000000000..4dcd1ea9a95e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float2WriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class Float2WriterImpl extends AbstractFieldWriter { + + final Float2Vector vector; + + +public Float2WriterImpl(Float2Vector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(Float2Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableFloat2Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeFloat2(short value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float4HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float4HolderReaderImpl.java new file mode 100644 index 000000000000..d300eb3d70fc --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float4HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class Float4HolderReaderImpl extends AbstractFieldReader { + + private Float4Holder holder; + public Float4HolderReaderImpl(Float4Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.FLOAT4; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(Float4Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableFloat4Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Float readFloat() { + + Float value = new Float(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readFloat(); + } + + public void copyAsValue(Float4Writer writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float4ReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float4ReaderImpl.java new file mode 100644 index 000000000000..1169505da60c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float4ReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class Float4ReaderImpl extends AbstractFieldReader { + + private final Float4Vector vector; + + public Float4ReaderImpl(Float4Vector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(Float4Writer writer){ + Float4WriterImpl impl = (Float4WriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + Float4WriterImpl impl = (Float4WriterImpl) writer.float4(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableFloat4Holder h){ + vector.get(idx(), h); + } + + public Float readFloat(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float4WriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float4WriterImpl.java new file mode 100644 index 000000000000..477f95a5ba0a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float4WriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class Float4WriterImpl extends AbstractFieldWriter { + + final Float4Vector vector; + + +public Float4WriterImpl(Float4Vector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(Float4Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableFloat4Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeFloat4(float value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float8HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float8HolderReaderImpl.java new file mode 100644 index 000000000000..a44cc30bfea1 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float8HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class Float8HolderReaderImpl extends AbstractFieldReader { + + private Float8Holder holder; + public Float8HolderReaderImpl(Float8Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.FLOAT8; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(Float8Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableFloat8Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Double readDouble() { + + Double value = new Double(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readDouble(); + } + + public void copyAsValue(Float8Writer writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float8ReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float8ReaderImpl.java new file mode 100644 index 000000000000..ae37392de833 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float8ReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class Float8ReaderImpl extends AbstractFieldReader { + + private final Float8Vector vector; + + public Float8ReaderImpl(Float8Vector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(Float8Writer writer){ + Float8WriterImpl impl = (Float8WriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + Float8WriterImpl impl = (Float8WriterImpl) writer.float8(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableFloat8Holder h){ + vector.get(idx(), h); + } + + public Double readDouble(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float8WriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float8WriterImpl.java new file mode 100644 index 000000000000..bc64f3f70264 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/Float8WriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class Float8WriterImpl extends AbstractFieldWriter { + + final Float8Vector vector; + + +public Float8WriterImpl(Float8Vector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(Float8Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableFloat8Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeFloat8(double value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntHolderReaderImpl.java new file mode 100644 index 000000000000..1804db8e3e20 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class IntHolderReaderImpl extends AbstractFieldReader { + + private IntHolder holder; + public IntHolderReaderImpl(IntHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.INT; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(IntHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableIntHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Integer readInteger() { + + Integer value = new Integer(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readInteger(); + } + + public void copyAsValue(IntWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntReaderImpl.java new file mode 100644 index 000000000000..fbfb5e8db80b --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class IntReaderImpl extends AbstractFieldReader { + + private final IntVector vector; + + public IntReaderImpl(IntVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(IntWriter writer){ + IntWriterImpl impl = (IntWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + IntWriterImpl impl = (IntWriterImpl) writer.integer(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableIntHolder h){ + vector.get(idx(), h); + } + + public Integer readInteger(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntWriterImpl.java new file mode 100644 index 000000000000..91e9696848d9 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class IntWriterImpl extends AbstractFieldWriter { + + final IntVector vector; + + +public IntWriterImpl(IntVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(IntHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableIntHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeInt(int value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalDayHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalDayHolderReaderImpl.java new file mode 100644 index 000000000000..7a14e8c70226 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalDayHolderReaderImpl.java @@ -0,0 +1,128 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class IntervalDayHolderReaderImpl extends AbstractFieldReader { + + private IntervalDayHolder holder; + public IntervalDayHolderReaderImpl(IntervalDayHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.INTERVALDAY; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(IntervalDayHolder h) { + h.days = holder.days; + h.milliseconds = holder.milliseconds; + } + + @Override + public void read(NullableIntervalDayHolder h) { + h.days = holder.days; + h.milliseconds = holder.milliseconds; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Duration readDuration() { + + return Duration.ofDays(holder.days).plusMillis(holder.milliseconds); + } + + @Override + public Object readObject() { + return readDuration(); + } + + public void copyAsValue(IntervalDayWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalDayReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalDayReaderImpl.java new file mode 100644 index 000000000000..1a0e33d491d3 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalDayReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class IntervalDayReaderImpl extends AbstractFieldReader { + + private final IntervalDayVector vector; + + public IntervalDayReaderImpl(IntervalDayVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean 
isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(IntervalDayWriter writer){ + IntervalDayWriterImpl impl = (IntervalDayWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + IntervalDayWriterImpl impl = (IntervalDayWriterImpl) writer.intervalDay(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableIntervalDayHolder h){ + vector.get(idx(), h); + } + + public Duration readDuration(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalDayWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalDayWriterImpl.java new file mode 100644 index 000000000000..f31cf988e77f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalDayWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class IntervalDayWriterImpl extends AbstractFieldWriter { + + final IntervalDayVector vector; + + +public IntervalDayWriterImpl(IntervalDayVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(IntervalDayHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableIntervalDayHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeIntervalDay(int days, int milliseconds) { + vector.setSafe(idx(), 1, days, milliseconds); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalMonthDayNanoHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalMonthDayNanoHolderReaderImpl.java new file mode 100644 index 000000000000..db53e527945e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalMonthDayNanoHolderReaderImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class IntervalMonthDayNanoHolderReaderImpl extends AbstractFieldReader { + + private IntervalMonthDayNanoHolder holder; + public IntervalMonthDayNanoHolderReaderImpl(IntervalMonthDayNanoHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.INTERVALMONTHDAYNANO; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(IntervalMonthDayNanoHolder h) { + h.months = holder.months; + h.days = holder.days; + h.nanoseconds = holder.nanoseconds; + } + + @Override + public void read(NullableIntervalMonthDayNanoHolder h) { + h.months = holder.months; + h.days = holder.days; + h.nanoseconds = holder.nanoseconds; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public PeriodDuration readPeriodDuration() { + + return new PeriodDuration(Period.ofMonths(holder.months).plusDays(holder.days), + Duration.ofNanos(holder.nanoseconds)); + } + + @Override + public Object readObject() { + return readPeriodDuration(); + } + + public void copyAsValue(IntervalMonthDayNanoWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalMonthDayNanoReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalMonthDayNanoReaderImpl.java new file mode 100644 index 000000000000..7d6a4d2331ac --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalMonthDayNanoReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class IntervalMonthDayNanoReaderImpl extends AbstractFieldReader { + + private final IntervalMonthDayNanoVector vector; + + public IntervalMonthDayNanoReaderImpl(IntervalMonthDayNanoVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return 
vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(IntervalMonthDayNanoWriter writer){ + IntervalMonthDayNanoWriterImpl impl = (IntervalMonthDayNanoWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + IntervalMonthDayNanoWriterImpl impl = (IntervalMonthDayNanoWriterImpl) writer.intervalMonthDayNano(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableIntervalMonthDayNanoHolder h){ + vector.get(idx(), h); + } + + public PeriodDuration readPeriodDuration(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalMonthDayNanoWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalMonthDayNanoWriterImpl.java new file mode 100644 index 000000000000..e282041eb330 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalMonthDayNanoWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class IntervalMonthDayNanoWriterImpl extends AbstractFieldWriter { + + final IntervalMonthDayNanoVector vector; + + +public IntervalMonthDayNanoWriterImpl(IntervalMonthDayNanoVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(IntervalMonthDayNanoHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableIntervalMonthDayNanoHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeIntervalMonthDayNano(int months, int days, long nanoseconds) { + vector.setSafe(idx(), 1, months, days, nanoseconds); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalYearHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalYearHolderReaderImpl.java new file mode 100644 index 000000000000..a9b414c0f8ee --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalYearHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class IntervalYearHolderReaderImpl extends AbstractFieldReader { + + private IntervalYearHolder holder; + public IntervalYearHolderReaderImpl(IntervalYearHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.INTERVALYEAR; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(IntervalYearHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableIntervalYearHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Period readPeriod() { + + return Period.ofMonths(holder.value); + } + + @Override + public Object readObject() { + return readPeriod(); + } + + public void copyAsValue(IntervalYearWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalYearReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalYearReaderImpl.java new file mode 100644 index 000000000000..9a4252b29409 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalYearReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class IntervalYearReaderImpl extends AbstractFieldReader { + + private final IntervalYearVector vector; + + public IntervalYearReaderImpl(IntervalYearVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean 
isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(IntervalYearWriter writer){ + IntervalYearWriterImpl impl = (IntervalYearWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + IntervalYearWriterImpl impl = (IntervalYearWriterImpl) writer.intervalYear(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableIntervalYearHolder h){ + vector.get(idx(), h); + } + + public Period readPeriod(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalYearWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalYearWriterImpl.java new file mode 100644 index 000000000000..3cf14a9550fb --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/IntervalYearWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class IntervalYearWriterImpl extends AbstractFieldWriter { + + final IntervalYearVector vector; + + +public IntervalYearWriterImpl(IntervalYearVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(IntervalYearHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableIntervalYearHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeIntervalYear(int value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarBinaryHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarBinaryHolderReaderImpl.java new file mode 100644 index 000000000000..cee296abc0e9 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarBinaryHolderReaderImpl.java @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class LargeVarBinaryHolderReaderImpl extends AbstractFieldReader { + + private LargeVarBinaryHolder holder; + public LargeVarBinaryHolderReaderImpl(LargeVarBinaryHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.LARGEVARBINARY; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(LargeVarBinaryHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + } + + @Override + public void read(NullableLargeVarBinaryHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public byte[] readByteArray() { + + int length = (int) (holder.end - holder.start); + byte[] value = new byte [length]; + holder.buffer.getBytes(holder.start, value, 0, length); + return value; + } + + @Override + public Object readObject() { + return readByteArray(); + } + + public void copyAsValue(LargeVarBinaryWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarBinaryReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarBinaryReaderImpl.java new file mode 100644 index 000000000000..88686397dff5 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarBinaryReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class LargeVarBinaryReaderImpl extends AbstractFieldReader { + + private final LargeVarBinaryVector vector; + + public LargeVarBinaryReaderImpl(LargeVarBinaryVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public 
boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(LargeVarBinaryWriter writer){ + LargeVarBinaryWriterImpl impl = (LargeVarBinaryWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + LargeVarBinaryWriterImpl impl = (LargeVarBinaryWriterImpl) writer.largeVarBinary(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableLargeVarBinaryHolder h){ + vector.get(idx(), h); + } + + public byte[] readByteArray(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarBinaryWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarBinaryWriterImpl.java new file mode 100644 index 000000000000..7d744677ebfe --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarBinaryWriterImpl.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class LargeVarBinaryWriterImpl extends AbstractFieldWriter { + + final LargeVarBinaryVector vector; + + +public LargeVarBinaryWriterImpl(LargeVarBinaryVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(LargeVarBinaryHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableLargeVarBinaryHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeLargeVarBinary(long start, long end, ArrowBuf buffer) { + vector.setSafe(idx(), 1, start, end, buffer); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + + public void writeLargeVarBinary(byte[] value) { + vector.setSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void writeLargeVarBinary(byte[] value, int offset, int length) { + vector.setSafe(idx(), value, offset, length); + vector.setValueCount(idx() + 1); + } + + public void writeLargeVarBinary(ByteBuffer value) { + vector.setSafe(idx(), value, 0, value.remaining()); + vector.setValueCount(idx() + 1); + } + + public void writeLargeVarBinary(ByteBuffer value, int offset, int length) { + vector.setSafe(idx(), value, offset, length); + vector.setValueCount(idx() + 1); + } +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarCharHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarCharHolderReaderImpl.java new file 
mode 100644 index 000000000000..91965dbf9949 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarCharHolderReaderImpl.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import 
org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class LargeVarCharHolderReaderImpl extends AbstractFieldReader { + + private LargeVarCharHolder holder; + public LargeVarCharHolderReaderImpl(LargeVarCharHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.LARGEVARCHAR; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(LargeVarCharHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + } + + @Override + public void read(NullableLargeVarCharHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Text readText() { + + int length = (int) (holder.end - holder.start); + byte[] value = new byte [length]; + holder.buffer.getBytes(holder.start, value, 0, length); + Text text = new Text(); + text.set(value); + return text; + } + + @Override + public Object readObject() { + return readText(); + } + + public void copyAsValue(LargeVarCharWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarCharReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarCharReaderImpl.java new file mode 100644 index 000000000000..adb88dafb08d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarCharReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class LargeVarCharReaderImpl extends AbstractFieldReader { + + private final LargeVarCharVector vector; + + public LargeVarCharReaderImpl(LargeVarCharVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean 
isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(LargeVarCharWriter writer){ + LargeVarCharWriterImpl impl = (LargeVarCharWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + LargeVarCharWriterImpl impl = (LargeVarCharWriterImpl) writer.largeVarChar(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableLargeVarCharHolder h){ + vector.get(idx(), h); + } + + public Text readText(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarCharWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarCharWriterImpl.java new file mode 100644 index 000000000000..9e3757e14157 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/LargeVarCharWriterImpl.java @@ -0,0 +1,144 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class LargeVarCharWriterImpl extends AbstractFieldWriter { + + final LargeVarCharVector vector; + + private final Text textBuffer = new Text(); + +public LargeVarCharWriterImpl(LargeVarCharVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(LargeVarCharHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableLargeVarCharHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeLargeVarChar(long start, long end, ArrowBuf buffer) { + vector.setSafe(idx(), 1, start, end, buffer); + vector.setValueCount(idx()+1); + } + + @Override + public void writeLargeVarChar(Text value) { + vector.setSafe(idx(), value); + vector.setValueCount(idx()+1); + } + + @Override + public void writeLargeVarChar(String value) { + textBuffer.set(value); + vector.setSafe(idx(), textBuffer); + vector.setValueCount(idx()+1); + } + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullReader.java new file mode 100644 index 000000000000..3d46fa27d364 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullReader.java @@ -0,0 +1,948 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template NullReader.java + */ +@SuppressWarnings("unused") +public class NullReader extends AbstractBaseReader implements FieldReader{ + + public static final NullReader INSTANCE = new NullReader(); + public static final NullReader EMPTY_LIST_INSTANCE = new NullReader(MinorType.NULL); + public static final NullReader EMPTY_STRUCT_INSTANCE = new NullReader(MinorType.STRUCT); + private MinorType type; + + private NullReader(){ + super(); + type = MinorType.NULL; + } + + private NullReader(MinorType type){ + super(); + this.type = type; + } + + @Override + public MinorType getMinorType() { + return type; + } + + @Override + public Field getField() { + return new Field("", FieldType.nullable(new Null()), null); + } + + public void copyAsValue(StructWriter writer) {} + + public void copyAsValue(ListWriter writer) {} + + public void copyAsValue(UnionWriter writer) {} + + public void read(TinyIntHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTinyIntHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TinyIntHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TinyIntWriter writer){} + public void copyAsField(String name, TinyIntWriter writer){} + + public void read(int arrayIndex, NullableTinyIntHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(UInt1Holder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableUInt1Holder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, UInt1Holder holder){ + throw new 
ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(UInt1Writer writer){} + public void copyAsField(String name, UInt1Writer writer){} + + public void read(int arrayIndex, NullableUInt1Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(UInt2Holder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableUInt2Holder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, UInt2Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(UInt2Writer writer){} + public void copyAsField(String name, UInt2Writer writer){} + + public void read(int arrayIndex, NullableUInt2Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(SmallIntHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableSmallIntHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, SmallIntHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(SmallIntWriter writer){} + public void copyAsField(String name, SmallIntWriter writer){} + + public void read(int arrayIndex, NullableSmallIntHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(Float2Holder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableFloat2Holder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, Float2Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(Float2Writer writer){} + public void copyAsField(String name, Float2Writer writer){} + + public void read(int arrayIndex, NullableFloat2Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(IntHolder holder){ + throw new 
UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableIntHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, IntHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(IntWriter writer){} + public void copyAsField(String name, IntWriter writer){} + + public void read(int arrayIndex, NullableIntHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(UInt4Holder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableUInt4Holder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, UInt4Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(UInt4Writer writer){} + public void copyAsField(String name, UInt4Writer writer){} + + public void read(int arrayIndex, NullableUInt4Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(Float4Holder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableFloat4Holder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, Float4Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(Float4Writer writer){} + public void copyAsField(String name, Float4Writer writer){} + + public void read(int arrayIndex, NullableFloat4Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(DateDayHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableDateDayHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, DateDayHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(DateDayWriter writer){} + public void copyAsField(String 
name, DateDayWriter writer){} + + public void read(int arrayIndex, NullableDateDayHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(IntervalYearHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableIntervalYearHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, IntervalYearHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(IntervalYearWriter writer){} + public void copyAsField(String name, IntervalYearWriter writer){} + + public void read(int arrayIndex, NullableIntervalYearHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeSecHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTimeSecHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeSecHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeSecWriter writer){} + public void copyAsField(String name, TimeSecWriter writer){} + + public void read(int arrayIndex, NullableTimeSecHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeMilliHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTimeMilliHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeMilliHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeMilliWriter writer){} + public void copyAsField(String name, TimeMilliWriter writer){} + + public void read(int arrayIndex, NullableTimeMilliHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(BigIntHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable 
holder"); + } + + public void read(NullableBigIntHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, BigIntHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(BigIntWriter writer){} + public void copyAsField(String name, BigIntWriter writer){} + + public void read(int arrayIndex, NullableBigIntHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(UInt8Holder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableUInt8Holder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, UInt8Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(UInt8Writer writer){} + public void copyAsField(String name, UInt8Writer writer){} + + public void read(int arrayIndex, NullableUInt8Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(Float8Holder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableFloat8Holder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, Float8Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(Float8Writer writer){} + public void copyAsField(String name, Float8Writer writer){} + + public void read(int arrayIndex, NullableFloat8Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(DateMilliHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableDateMilliHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, DateMilliHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(DateMilliWriter writer){} + public void copyAsField(String name, DateMilliWriter writer){} + + public void 
read(int arrayIndex, NullableDateMilliHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(DurationHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableDurationHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, DurationHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(DurationWriter writer){} + public void copyAsField(String name, DurationWriter writer){} + + public void read(int arrayIndex, NullableDurationHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeStampSecHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTimeStampSecHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeStampSecHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeStampSecWriter writer){} + public void copyAsField(String name, TimeStampSecWriter writer){} + + public void read(int arrayIndex, NullableTimeStampSecHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeStampMilliHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTimeStampMilliHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeStampMilliHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeStampMilliWriter writer){} + public void copyAsField(String name, TimeStampMilliWriter writer){} + + public void read(int arrayIndex, NullableTimeStampMilliHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeStampMicroHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable 
holder"); + } + + public void read(NullableTimeStampMicroHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeStampMicroHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeStampMicroWriter writer){} + public void copyAsField(String name, TimeStampMicroWriter writer){} + + public void read(int arrayIndex, NullableTimeStampMicroHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeStampNanoHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTimeStampNanoHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeStampNanoHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeStampNanoWriter writer){} + public void copyAsField(String name, TimeStampNanoWriter writer){} + + public void read(int arrayIndex, NullableTimeStampNanoHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeStampSecTZHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTimeStampSecTZHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeStampSecTZHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeStampSecTZWriter writer){} + public void copyAsField(String name, TimeStampSecTZWriter writer){} + + public void read(int arrayIndex, NullableTimeStampSecTZHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeStampMilliTZHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTimeStampMilliTZHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeStampMilliTZHolder holder){ + throw new 
ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeStampMilliTZWriter writer){} + public void copyAsField(String name, TimeStampMilliTZWriter writer){} + + public void read(int arrayIndex, NullableTimeStampMilliTZHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeStampMicroTZHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTimeStampMicroTZHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeStampMicroTZHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeStampMicroTZWriter writer){} + public void copyAsField(String name, TimeStampMicroTZWriter writer){} + + public void read(int arrayIndex, NullableTimeStampMicroTZHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeStampNanoTZHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTimeStampNanoTZHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeStampNanoTZHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeStampNanoTZWriter writer){} + public void copyAsField(String name, TimeStampNanoTZWriter writer){} + + public void read(int arrayIndex, NullableTimeStampNanoTZHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeMicroHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTimeMicroHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeMicroHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeMicroWriter writer){} + public void copyAsField(String name, TimeMicroWriter writer){} + + public void read(int 
arrayIndex, NullableTimeMicroHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(TimeNanoHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableTimeNanoHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, TimeNanoHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(TimeNanoWriter writer){} + public void copyAsField(String name, TimeNanoWriter writer){} + + public void read(int arrayIndex, NullableTimeNanoHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(IntervalDayHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableIntervalDayHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, IntervalDayHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(IntervalDayWriter writer){} + public void copyAsField(String name, IntervalDayWriter writer){} + + public void read(int arrayIndex, NullableIntervalDayHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(IntervalMonthDayNanoHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableIntervalMonthDayNanoHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, IntervalMonthDayNanoHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(IntervalMonthDayNanoWriter writer){} + public void copyAsField(String name, IntervalMonthDayNanoWriter writer){} + + public void read(int arrayIndex, NullableIntervalMonthDayNanoHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(Decimal256Holder holder){ + throw new UnsupportedOperationException("NullReader cannot write into 
non-nullable holder"); + } + + public void read(NullableDecimal256Holder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, Decimal256Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(Decimal256Writer writer){} + public void copyAsField(String name, Decimal256Writer writer){} + + public void read(int arrayIndex, NullableDecimal256Holder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(DecimalHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableDecimalHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, DecimalHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(DecimalWriter writer){} + public void copyAsField(String name, DecimalWriter writer){} + + public void read(int arrayIndex, NullableDecimalHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(FixedSizeBinaryHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableFixedSizeBinaryHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, FixedSizeBinaryHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(FixedSizeBinaryWriter writer){} + public void copyAsField(String name, FixedSizeBinaryWriter writer){} + + public void read(int arrayIndex, NullableFixedSizeBinaryHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(VarBinaryHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableVarBinaryHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, VarBinaryHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void 
copyAsValue(VarBinaryWriter writer){} + public void copyAsField(String name, VarBinaryWriter writer){} + + public void read(int arrayIndex, NullableVarBinaryHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(VarCharHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableVarCharHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, VarCharHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(VarCharWriter writer){} + public void copyAsField(String name, VarCharWriter writer){} + + public void read(int arrayIndex, NullableVarCharHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(LargeVarCharHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableLargeVarCharHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, LargeVarCharHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(LargeVarCharWriter writer){} + public void copyAsField(String name, LargeVarCharWriter writer){} + + public void read(int arrayIndex, NullableLargeVarCharHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void read(LargeVarBinaryHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableLargeVarBinaryHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, LargeVarBinaryHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(LargeVarBinaryWriter writer){} + public void copyAsField(String name, LargeVarBinaryWriter writer){} + + public void read(int arrayIndex, NullableLargeVarBinaryHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + public void 
read(BitHolder holder){ + throw new UnsupportedOperationException("NullReader cannot write into non-nullable holder"); + } + + public void read(NullableBitHolder holder){ + holder.isSet = 0; + } + + public void read(int arrayIndex, BitHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public void copyAsValue(BitWriter writer){} + public void copyAsField(String name, BitWriter writer){} + + public void read(int arrayIndex, NullableBitHolder holder){ + throw new ArrayIndexOutOfBoundsException(); + } + + public int size(){ + return 0; + } + + public boolean isSet(){ + return false; + } + + public boolean next(){ + return false; + } + + public RepeatedStructReader struct(){ + return this; + } + + public RepeatedListReader list(){ + return this; + } + + public StructReader struct(String name){ + return this; + } + + public ListReader list(String name){ + return this; + } + + public FieldReader reader(String name){ + return this; + } + + public FieldReader reader(){ + return this; + } + + private void fail(String name){ + throw new IllegalArgumentException(String.format("You tried to read a %s type when you are using a ValueReader of type %s.", name, this.getClass().getSimpleName())); + } + + + public Object readObject(int arrayIndex){ + return null; + } + + public Object readObject(){ + return null; + } + + public BigDecimal readBigDecimal(int arrayIndex){ + return null; + } + + public BigDecimal readBigDecimal(){ + return null; + } + + public Short readShort(int arrayIndex){ + return null; + } + + public Short readShort(){ + return null; + } + + public Integer readInteger(int arrayIndex){ + return null; + } + + public Integer readInteger(){ + return null; + } + + public Long readLong(int arrayIndex){ + return null; + } + + public Long readLong(){ + return null; + } + + public Boolean readBoolean(int arrayIndex){ + return null; + } + + public Boolean readBoolean(){ + return null; + } + + public LocalDateTime readLocalDateTime(int arrayIndex){ + return 
null; + } + + public LocalDateTime readLocalDateTime(){ + return null; + } + + public Duration readDuration(int arrayIndex){ + return null; + } + + public Duration readDuration(){ + return null; + } + + public Period readPeriod(int arrayIndex){ + return null; + } + + public Period readPeriod(){ + return null; + } + + public Double readDouble(int arrayIndex){ + return null; + } + + public Double readDouble(){ + return null; + } + + public Float readFloat(int arrayIndex){ + return null; + } + + public Float readFloat(){ + return null; + } + + public Character readCharacter(int arrayIndex){ + return null; + } + + public Character readCharacter(){ + return null; + } + + public Text readText(int arrayIndex){ + return null; + } + + public Text readText(){ + return null; + } + + public String readString(int arrayIndex){ + return null; + } + + public String readString(){ + return null; + } + + public Byte readByte(int arrayIndex){ + return null; + } + + public Byte readByte(){ + return null; + } + + public byte[] readByteArray(int arrayIndex){ + return null; + } + + public byte[] readByteArray(){ + return null; + } + + public PeriodDuration readPeriodDuration(int arrayIndex){ + return null; + } + + public PeriodDuration readPeriodDuration(){ + return null; + } + +} + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableBigIntHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableBigIntHolderReaderImpl.java new file mode 100644 index 000000000000..3e473b022163 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableBigIntHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableBigIntHolderReaderImpl extends AbstractFieldReader { + + private NullableBigIntHolder holder; + public NullableBigIntHolderReaderImpl(NullableBigIntHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.BIGINT; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(BigIntHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableBigIntHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + if (!isSet()) { + return null; + } + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableBitHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableBitHolderReaderImpl.java new file mode 100644 index 000000000000..4ce6e2dcd234 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableBitHolderReaderImpl.java @@ -0,0 +1,125 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableBitHolderReaderImpl extends AbstractFieldReader { + + private NullableBitHolder holder; + public NullableBitHolderReaderImpl(NullableBitHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { 
+ throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.BIT; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(BitHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableBitHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Boolean readBoolean() { + if (!isSet()) { + return null; + } + + return new Boolean(holder.value != 0); + } + + @Override + public Object readObject() { + return readBoolean(); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableCaseSensitiveStructWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableCaseSensitiveStructWriter.java new file mode 100644 index 000000000000..b2c9dbf27a33 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableCaseSensitiveStructWriter.java @@ -0,0 +1,81 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker and the CaseSensitiveStructWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class NullableCaseSensitiveStructWriter extends NullableStructWriter { + public NullableCaseSensitiveStructWriter(StructVector container) { + super(container); + } + + @Override + protected String handleCase(final String input){ + return input; + } + + @Override + protected NullableStructWriterFactory getNullableStructWriterFactory() { + return NullableStructWriterFactory.getNullableCaseSensitiveStructWriterFactoryInstance(); + } + +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDateDayHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDateDayHolderReaderImpl.java new file mode 100644 index 000000000000..7ac0c0f2a0e5 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDateDayHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableDateDayHolderReaderImpl extends AbstractFieldReader { + + private NullableDateDayHolder holder; + public NullableDateDayHolderReaderImpl(NullableDateDayHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public 
boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.DATEDAY; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(DateDayHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableDateDayHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Integer readInteger() { + if (!isSet()) { + return null; + } + + Integer value = new Integer(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readInteger(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDateMilliHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDateMilliHolderReaderImpl.java new file mode 100644 index 000000000000..2ff46e34723a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDateMilliHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableDateMilliHolderReaderImpl extends AbstractFieldReader { + + private NullableDateMilliHolder holder; + public NullableDateMilliHolderReaderImpl(NullableDateMilliHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.DATEMILLI; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(DateMilliHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableDateMilliHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + if (!isSet()) { + return null; + } + + return DateUtility.getLocalDateTimeFromEpochMilli(holder.value); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDecimal256HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDecimal256HolderReaderImpl.java new file mode 100644 index 000000000000..1a888bbdd77f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDecimal256HolderReaderImpl.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableDecimal256HolderReaderImpl extends AbstractFieldReader { + + private NullableDecimal256Holder holder; + public NullableDecimal256HolderReaderImpl(NullableDecimal256Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.DECIMAL256; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(Decimal256Holder h) { + h.start = holder.start; + h.buffer = holder.buffer; + h.scale = holder.scale; + h.precision = holder.precision; + } + + @Override + public void read(NullableDecimal256Holder h) { + h.start = holder.start; + h.buffer = holder.buffer; + h.scale = holder.scale; + h.precision = holder.precision; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public BigDecimal readBigDecimal() { + if (!isSet()) { + return null; + } + + byte[] bytes = new byte[32]; + holder.buffer.getBytes(holder.start, bytes, 0, 32); + BigDecimal value = new BigDecimal(new BigInteger(bytes), holder.scale); + return value; + } + + @Override + public Object readObject() { + return readBigDecimal(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDecimalHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDecimalHolderReaderImpl.java new file mode 100644 index 000000000000..8cebb2897e3c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDecimalHolderReaderImpl.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableDecimalHolderReaderImpl extends AbstractFieldReader { + + private NullableDecimalHolder holder; + public NullableDecimalHolderReaderImpl(NullableDecimalHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public 
boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.DECIMAL; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(DecimalHolder h) { + h.start = holder.start; + h.buffer = holder.buffer; + h.scale = holder.scale; + h.precision = holder.precision; + } + + @Override + public void read(NullableDecimalHolder h) { + h.start = holder.start; + h.buffer = holder.buffer; + h.scale = holder.scale; + h.precision = holder.precision; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public BigDecimal readBigDecimal() { + if (!isSet()) { + return null; + } + + byte[] bytes = new byte[16]; + holder.buffer.getBytes(holder.start, bytes, 0, 16); + BigDecimal value = new BigDecimal(new BigInteger(bytes), holder.scale); + return value; + } + + @Override + public Object readObject() { + return readBigDecimal(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDurationHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDurationHolderReaderImpl.java new file mode 100644 index 000000000000..7ffd487ba430 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableDurationHolderReaderImpl.java @@ -0,0 +1,128 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableDurationHolderReaderImpl extends AbstractFieldReader { + + private NullableDurationHolder holder; + public NullableDurationHolderReaderImpl(NullableDurationHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.DURATION; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(DurationHolder h) { + h.value = holder.value; + h.unit = holder.unit; + } + + @Override + public void read(NullableDurationHolder h) { + h.value = holder.value; + h.unit = holder.unit; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Duration readDuration() { + if (!isSet()) { + return null; + } + + return DurationVector.toDuration(holder.value, holder.unit); + } + + @Override + public Object readObject() { + return readDuration(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFixedSizeBinaryHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFixedSizeBinaryHolderReaderImpl.java new file mode 100644 index 000000000000..290c704b4765 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFixedSizeBinaryHolderReaderImpl.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableFixedSizeBinaryHolderReaderImpl extends AbstractFieldReader { + + private NullableFixedSizeBinaryHolder holder; + public NullableFixedSizeBinaryHolderReaderImpl(NullableFixedSizeBinaryHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value 
reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.FIXEDSIZEBINARY; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(FixedSizeBinaryHolder h) { + h.buffer = holder.buffer; + h.byteWidth = holder.byteWidth; + } + + @Override + public void read(NullableFixedSizeBinaryHolder h) { + h.buffer = holder.buffer; + h.byteWidth = holder.byteWidth; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public byte[] readByteArray() { + if (!isSet()) { + return null; + } + + byte[] value = new byte [holder.byteWidth]; + holder.buffer.getBytes(0, value, 0, holder.byteWidth); + return value; + } + + @Override + public Object readObject() { + return readByteArray(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFloat2HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFloat2HolderReaderImpl.java new file mode 100644 index 000000000000..0e047b462284 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFloat2HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableFloat2HolderReaderImpl extends AbstractFieldReader { + + private NullableFloat2Holder holder; + public NullableFloat2HolderReaderImpl(NullableFloat2Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.FLOAT2; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(Float2Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableFloat2Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Short readShort() { + if (!isSet()) { + return null; + } + + Short value = new Short(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readShort(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFloat4HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFloat4HolderReaderImpl.java new file mode 100644 index 000000000000..8822cd0341ef --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFloat4HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableFloat4HolderReaderImpl extends AbstractFieldReader { + + private NullableFloat4Holder holder; + public NullableFloat4HolderReaderImpl(NullableFloat4Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.FLOAT4; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(Float4Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableFloat4Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Float readFloat() { + if (!isSet()) { + return null; + } + + Float value = new Float(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readFloat(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFloat8HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFloat8HolderReaderImpl.java new file mode 100644 index 000000000000..e2dae421732a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableFloat8HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableFloat8HolderReaderImpl extends AbstractFieldReader { + + private NullableFloat8Holder holder; + public NullableFloat8HolderReaderImpl(NullableFloat8Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public 
boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.FLOAT8; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(Float8Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableFloat8Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Double readDouble() { + if (!isSet()) { + return null; + } + + Double value = new Double(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readDouble(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntHolderReaderImpl.java new file mode 100644 index 000000000000..a386a150ec18 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableIntHolderReaderImpl extends AbstractFieldReader { + + private NullableIntHolder holder; + public NullableIntHolderReaderImpl(NullableIntHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.INT; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(IntHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableIntHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Integer readInteger() { + if (!isSet()) { + return null; + } + + Integer value = new Integer(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readInteger(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntervalDayHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntervalDayHolderReaderImpl.java new file mode 100644 index 000000000000..20426521669f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntervalDayHolderReaderImpl.java @@ -0,0 +1,128 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableIntervalDayHolderReaderImpl extends AbstractFieldReader { + + private NullableIntervalDayHolder holder; + public NullableIntervalDayHolderReaderImpl(NullableIntervalDayHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.INTERVALDAY; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(IntervalDayHolder h) { + h.days = holder.days; + h.milliseconds = holder.milliseconds; + } + + @Override + public void read(NullableIntervalDayHolder h) { + h.days = holder.days; + h.milliseconds = holder.milliseconds; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Duration readDuration() { + if (!isSet()) { + return null; + } + + return Duration.ofDays(holder.days).plusMillis(holder.milliseconds); + } + + @Override + public Object readObject() { + return readDuration(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntervalMonthDayNanoHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntervalMonthDayNanoHolderReaderImpl.java new file mode 100644 index 000000000000..6069a4a8e586 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntervalMonthDayNanoHolderReaderImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableIntervalMonthDayNanoHolderReaderImpl extends AbstractFieldReader { + + private NullableIntervalMonthDayNanoHolder holder; + public NullableIntervalMonthDayNanoHolderReaderImpl(NullableIntervalMonthDayNanoHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a 
Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.INTERVALMONTHDAYNANO; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(IntervalMonthDayNanoHolder h) { + h.months = holder.months; + h.days = holder.days; + h.nanoseconds = holder.nanoseconds; + } + + @Override + public void read(NullableIntervalMonthDayNanoHolder h) { + h.months = holder.months; + h.days = holder.days; + h.nanoseconds = holder.nanoseconds; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public PeriodDuration readPeriodDuration() { + if (!isSet()) { + return null; + } + + return new PeriodDuration(Period.ofMonths(holder.months).plusDays(holder.days), + Duration.ofNanos(holder.nanoseconds)); + } + + @Override + public Object readObject() { + return readPeriodDuration(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntervalYearHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntervalYearHolderReaderImpl.java new file mode 100644 index 000000000000..d4cad57a6954 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableIntervalYearHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableIntervalYearHolderReaderImpl extends AbstractFieldReader { + + private NullableIntervalYearHolder holder; + public NullableIntervalYearHolderReaderImpl(NullableIntervalYearHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.INTERVALYEAR; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(IntervalYearHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableIntervalYearHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Period readPeriod() { + if (!isSet()) { + return null; + } + + return Period.ofMonths(holder.value); + } + + @Override + public Object readObject() { + return readPeriod(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableLargeVarBinaryHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableLargeVarBinaryHolderReaderImpl.java new file mode 100644 index 000000000000..af7621121ffd --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableLargeVarBinaryHolderReaderImpl.java @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableLargeVarBinaryHolderReaderImpl extends AbstractFieldReader { + + private NullableLargeVarBinaryHolder holder; + public NullableLargeVarBinaryHolderReaderImpl(NullableLargeVarBinaryHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); 
+ } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.LARGEVARBINARY; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(LargeVarBinaryHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + } + + @Override + public void read(NullableLargeVarBinaryHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public byte[] readByteArray() { + if (!isSet()) { + return null; + } + + int length = (int) (holder.end - holder.start); + byte[] value = new byte [length]; + holder.buffer.getBytes(holder.start, value, 0, length); + return value; + } + + @Override + public Object readObject() { + return readByteArray(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableLargeVarCharHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableLargeVarCharHolderReaderImpl.java new file mode 100644 index 000000000000..043be1a0f9f1 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableLargeVarCharHolderReaderImpl.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableLargeVarCharHolderReaderImpl extends AbstractFieldReader { + + private NullableLargeVarCharHolder holder; + public NullableLargeVarCharHolderReaderImpl(NullableLargeVarCharHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.LARGEVARCHAR; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(LargeVarCharHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + } + + @Override + public void read(NullableLargeVarCharHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Text readText() { + if (!isSet()) { + return null; + } + + int length = (int) (holder.end - holder.start); + byte[] value = new byte [length]; + holder.buffer.getBytes(holder.start, value, 0, length); + Text text = new Text(); + text.set(value); + return text; + } + + @Override + public Object readObject() { + return readText(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableSmallIntHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableSmallIntHolderReaderImpl.java new file mode 100644 index 000000000000..7c33d5138630 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableSmallIntHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableSmallIntHolderReaderImpl extends AbstractFieldReader { + + private NullableSmallIntHolder holder; + public NullableSmallIntHolderReaderImpl(NullableSmallIntHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + 
public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.SMALLINT; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(SmallIntHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableSmallIntHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Short readShort() { + if (!isSet()) { + return null; + } + + Short value = new Short(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readShort(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableStructWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableStructWriter.java new file mode 100644 index 000000000000..f2e53afea491 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableStructWriter.java @@ -0,0 +1,1647 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +import java.util.Map; +import java.util.HashMap; + +import org.apache.arrow.vector.holders.RepeatedStructHolder; +import org.apache.arrow.vector.AllocationHelper; +import org.apache.arrow.vector.complex.reader.FieldReader; +import org.apache.arrow.vector.complex.writer.FieldWriter; + + +/* + * This class is generated using FreeMarker and the StructWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class NullableStructWriter extends AbstractFieldWriter { + + protected final StructVector container; + private int initialCapacity; + private final Map fields = new HashMap<>(); + public NullableStructWriter(StructVector container) { + this.container = container; + this.initialCapacity = 0; + for (Field child : container.getField().getChildren()) { + MinorType minorType = Types.getMinorTypeForArrowType(child.getType()); + switch (minorType) { + case STRUCT: + struct(child.getName()); + break; + case LIST: + list(child.getName()); + break; + case MAP: { + ArrowType.Map arrowType = (ArrowType.Map) child.getType(); + map(child.getName(), arrowType.getKeysSorted()); + break; + } + case DENSEUNION: { + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.DENSEUNION.getType(), null, null); + DenseUnionWriter writer = new DenseUnionWriter(container.addOrGet(child.getName(), fieldType, DenseUnionVector.class), getNullableStructWriterFactory()); + fields.put(handleCase(child.getName()), writer); + break; + } + case UNION: + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.UNION.getType(), null, null); + UnionWriter writer = new UnionWriter(container.addOrGet(child.getName(), fieldType, UnionVector.class), getNullableStructWriterFactory()); + fields.put(handleCase(child.getName()), writer); + break; + case TINYINT: { + tinyInt(child.getName()); + break; + } + case UINT1: { + uInt1(child.getName()); + break; + } + case UINT2: { + uInt2(child.getName()); + break; + } + case SMALLINT: { + smallInt(child.getName()); + break; + } + case FLOAT2: { + float2(child.getName()); + break; + } + case INT: { + integer(child.getName()); + break; + } + case UINT4: { + uInt4(child.getName()); + break; + } + case FLOAT4: { + float4(child.getName()); + break; + } + case DATEDAY: { + dateDay(child.getName()); + break; + } + case INTERVALYEAR: { + intervalYear(child.getName()); + break; + } + case TIMESEC: { + 
timeSec(child.getName()); + break; + } + case TIMEMILLI: { + timeMilli(child.getName()); + break; + } + case BIGINT: { + bigInt(child.getName()); + break; + } + case UINT8: { + uInt8(child.getName()); + break; + } + case FLOAT8: { + float8(child.getName()); + break; + } + case DATEMILLI: { + dateMilli(child.getName()); + break; + } + case DURATION: { + org.apache.arrow.vector.types.pojo.ArrowType.Duration arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Duration)child.getType(); + duration(child.getName(), arrowType.getUnit()); + break; + } + case TIMESTAMPSEC: { + timeStampSec(child.getName()); + break; + } + case TIMESTAMPMILLI: { + timeStampMilli(child.getName()); + break; + } + case TIMESTAMPMICRO: { + timeStampMicro(child.getName()); + break; + } + case TIMESTAMPNANO: { + timeStampNano(child.getName()); + break; + } + case TIMESTAMPSECTZ: { + org.apache.arrow.vector.types.pojo.ArrowType.Timestamp arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Timestamp)child.getType(); + timeStampSecTZ(child.getName(), arrowType.getTimezone()); + break; + } + case TIMESTAMPMILLITZ: { + org.apache.arrow.vector.types.pojo.ArrowType.Timestamp arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Timestamp)child.getType(); + timeStampMilliTZ(child.getName(), arrowType.getTimezone()); + break; + } + case TIMESTAMPMICROTZ: { + org.apache.arrow.vector.types.pojo.ArrowType.Timestamp arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Timestamp)child.getType(); + timeStampMicroTZ(child.getName(), arrowType.getTimezone()); + break; + } + case TIMESTAMPNANOTZ: { + org.apache.arrow.vector.types.pojo.ArrowType.Timestamp arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Timestamp)child.getType(); + timeStampNanoTZ(child.getName(), arrowType.getTimezone()); + break; + } + case TIMEMICRO: { + timeMicro(child.getName()); + break; + } + case TIMENANO: { + timeNano(child.getName()); + break; + } + case INTERVALDAY: { + intervalDay(child.getName()); + break; 
+ } + case INTERVALMONTHDAYNANO: { + intervalMonthDayNano(child.getName()); + break; + } + case DECIMAL256: { + org.apache.arrow.vector.types.pojo.ArrowType.Decimal arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Decimal)child.getType(); + decimal256(child.getName(), arrowType.getScale(), arrowType.getPrecision()); + break; + } + case DECIMAL: { + org.apache.arrow.vector.types.pojo.ArrowType.Decimal arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Decimal)child.getType(); + decimal(child.getName(), arrowType.getScale(), arrowType.getPrecision()); + break; + } + case FIXEDSIZEBINARY: { + org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeBinary arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeBinary)child.getType(); + fixedSizeBinary(child.getName(), arrowType.getByteWidth()); + break; + } + case VARBINARY: { + varBinary(child.getName()); + break; + } + case VARCHAR: { + varChar(child.getName()); + break; + } + case LARGEVARCHAR: { + largeVarChar(child.getName()); + break; + } + case LARGEVARBINARY: { + largeVarBinary(child.getName()); + break; + } + case BIT: { + bit(child.getName()); + break; + } + default: + throw new UnsupportedOperationException("Unknown type: " + minorType); + } + } + } + + protected String handleCase(final String input) { + return input.toLowerCase(); + } + + protected NullableStructWriterFactory getNullableStructWriterFactory() { + return NullableStructWriterFactory.getNullableStructWriterFactoryInstance(); + } + + @Override + public int getValueCapacity() { + return container.getValueCapacity(); + } + + public void setInitialCapacity(int initialCapacity) { + this.initialCapacity = initialCapacity; + container.setInitialCapacity(initialCapacity); + } + + @Override + public boolean isEmptyStruct() { + return 0 == container.size(); + } + + @Override + public Field getField() { + return container.getField(); + } + + @Override + public StructWriter struct(String name) { + String finalName = 
handleCase(name); + FieldWriter writer = fields.get(finalName); + if(writer == null){ + int vectorCount=container.size(); + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.STRUCT.getType(), null, null); + StructVector vector = container.addOrGet(name, fieldType, StructVector.class); + writer = new PromotableWriter(vector, container, getNullableStructWriterFactory()); + if(vectorCount != container.size()) { + writer.allocate(); + } + writer.setPosition(idx()); + fields.put(finalName, writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.STRUCT); + } + } + return writer; + } + + @Override + public void close() throws Exception { + clear(); + container.close(); + } + + @Override + public void allocate() { + container.allocateNew(); + for(final FieldWriter w : fields.values()) { + w.allocate(); + } + } + + @Override + public void clear() { + container.clear(); + for(final FieldWriter w : fields.values()) { + w.clear(); + } + } + + @Override + public ListWriter list(String name) { + String finalName = handleCase(name); + FieldWriter writer = fields.get(finalName); + int vectorCount = container.size(); + if(writer == null) { + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.LIST.getType(), null, null); + writer = new PromotableWriter(container.addOrGet(name, fieldType, ListVector.class), container, getNullableStructWriterFactory()); + if (container.size() > vectorCount) { + writer.allocate(); + } + writer.setPosition(idx()); + fields.put(finalName, writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.LIST); + } + } + return writer; + } + + @Override + public MapWriter map(String name) { + return map(name, false); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + FieldWriter writer = fields.get(handleCase(name)); + 
if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + MapVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new ArrowType.Map(keysSorted) + ,null, null), + MapVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.MAP, new ArrowType.Map(keysSorted)); + } + } + return writer; + } + + public void setValueCount(int count) { + container.setValueCount(count); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + for(final FieldWriter w: fields.values()) { + w.setPosition(index); + } + } + + @Override + public void writeNull() { + container.setNull(idx()); + setValueCount(idx()+1); + super.setPosition(idx()+1); + } + + @Override + public void start() { + container.setIndexDefined(idx()); + } + + @Override + public void end() { + setPosition(idx()+1); + } + + + @Override + public TinyIntWriter tinyInt(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TinyIntVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TINYINT.getType() + ,null, null), + TinyIntVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + 
fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TINYINT); + } + } + return writer; + } + + + @Override + public UInt1Writer uInt1(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + UInt1Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.UINT1.getType() + ,null, null), + UInt1Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.UINT1); + } + } + return writer; + } + + + @Override + public UInt2Writer uInt2(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + UInt2Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.UINT2.getType() + ,null, null), + UInt2Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.UINT2); + } + } + return writer; + } + + + @Override + public 
SmallIntWriter smallInt(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + SmallIntVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.SMALLINT.getType() + ,null, null), + SmallIntVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.SMALLINT); + } + } + return writer; + } + + + @Override + public Float2Writer float2(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + Float2Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.FLOAT2.getType() + ,null, null), + Float2Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.FLOAT2); + } + } + return writer; + } + + + @Override + public IntWriter integer(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + IntVector v = 
container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.INT.getType() + ,null, null), + IntVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.INT); + } + } + return writer; + } + + + @Override + public UInt4Writer uInt4(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + UInt4Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.UINT4.getType() + ,null, null), + UInt4Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.UINT4); + } + } + return writer; + } + + + @Override + public Float4Writer float4(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + Float4Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.FLOAT4.getType() + ,null, null), + Float4Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == 
null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.FLOAT4); + } + } + return writer; + } + + + @Override + public DateDayWriter dateDay(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + DateDayVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.DATEDAY.getType() + ,null, null), + DateDayVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.DATEDAY); + } + } + return writer; + } + + + @Override + public IntervalYearWriter intervalYear(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + IntervalYearVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.INTERVALYEAR.getType() + ,null, null), + IntervalYearVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + 
fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.INTERVALYEAR); + } + } + return writer; + } + + + @Override + public TimeSecWriter timeSec(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeSecVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMESEC.getType() + ,null, null), + TimeSecVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMESEC); + } + } + return writer; + } + + + @Override + public TimeMilliWriter timeMilli(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeMilliVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMEMILLI.getType() + ,null, null), + TimeMilliVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMEMILLI); + } + } + return 
writer; + } + + + @Override + public BigIntWriter bigInt(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + BigIntVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.BIGINT.getType() + ,null, null), + BigIntVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.BIGINT); + } + } + return writer; + } + + + @Override + public UInt8Writer uInt8(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + UInt8Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.UINT8.getType() + ,null, null), + UInt8Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.UINT8); + } + } + return writer; + } + + + @Override + public Float8Writer float8(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + 
Float8Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.FLOAT8.getType() + ,null, null), + Float8Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.FLOAT8); + } + } + return writer; + } + + + @Override + public DateMilliWriter dateMilli(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + DateMilliVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.DATEMILLI.getType() + ,null, null), + DateMilliVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.DATEMILLI); + } + } + return writer; + } + + + @Override + public DurationWriter duration(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public DurationWriter duration(String name, org.apache.arrow.vector.types.TimeUnit unit) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector 
currentVector = container.getChild(name); + DurationVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Duration(unit) + ,null, null), + DurationVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.Duration(unit); + ((PromotableWriter)writer).getWriter(MinorType.DURATION, arrowType); + } + } + return writer; + } + + + @Override + public TimeStampSecWriter timeStampSec(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampSecVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMESTAMPSEC.getType() + ,null, null), + TimeStampSecVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPSEC); + } + } + return writer; + } + + + @Override + public TimeStampMilliWriter timeStampMilli(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = 
container.getChild(name); + TimeStampMilliVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMESTAMPMILLI.getType() + ,null, null), + TimeStampMilliVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPMILLI); + } + } + return writer; + } + + + @Override + public TimeStampMicroWriter timeStampMicro(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampMicroVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMESTAMPMICRO.getType() + ,null, null), + TimeStampMicroVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPMICRO); + } + } + return writer; + } + + + @Override + public TimeStampNanoWriter timeStampNano(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampNanoVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + 
MinorType.TIMESTAMPNANO.getType() + ,null, null), + TimeStampNanoVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPNANO); + } + } + return writer; + } + + + @Override + public TimeStampSecTZWriter timeStampSecTZ(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public TimeStampSecTZWriter timeStampSecTZ(String name, String timezone) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampSecTZVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.SECOND, timezone) + ,null, null), + TimeStampSecTZVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.SECOND, timezone); + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPSECTZ, arrowType); + } + } + return 
writer; + } + + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name, String timezone) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampMilliTZVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.MILLISECOND, timezone) + ,null, null), + TimeStampMilliTZVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.MILLISECOND, timezone); + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPMILLITZ, arrowType); + } + } + return writer; + } + + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ(String name, String timezone) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampMicroTZVector v = container.addOrGet(name, + new 
FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.MICROSECOND, timezone) + ,null, null), + TimeStampMicroTZVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.MICROSECOND, timezone); + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPMICROTZ, arrowType); + } + } + return writer; + } + + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name, String timezone) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampNanoTZVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.NANOSECOND, timezone) + ,null, null), + TimeStampNanoTZVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof 
PromotableWriter) { + // ensure writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.NANOSECOND, timezone); + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPNANOTZ, arrowType); + } + } + return writer; + } + + + @Override + public TimeMicroWriter timeMicro(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeMicroVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMEMICRO.getType() + ,null, null), + TimeMicroVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMEMICRO); + } + } + return writer; + } + + + @Override + public TimeNanoWriter timeNano(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeNanoVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMENANO.getType() + ,null, null), + TimeNanoVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers 
are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMENANO); + } + } + return writer; + } + + + @Override + public IntervalDayWriter intervalDay(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + IntervalDayVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.INTERVALDAY.getType() + ,null, null), + IntervalDayVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.INTERVALDAY); + } + } + return writer; + } + + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + IntervalMonthDayNanoVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.INTERVALMONTHDAYNANO.getType() + ,null, null), + IntervalMonthDayNanoVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.INTERVALMONTHDAYNANO); + } + } + return writer; + } + + + 
@Override + public Decimal256Writer decimal256(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public Decimal256Writer decimal256(String name, int scale, int precision) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + Decimal256Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(precision, scale, Decimal256Vector.TYPE_WIDTH * 8) + ,null, null), + Decimal256Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.DECIMAL256, new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(precision, scale, Decimal256Vector.TYPE_WIDTH * 8)); + } + } + return writer; + } + + + @Override + public DecimalWriter decimal(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public DecimalWriter decimal(String name, int scale, int precision) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + DecimalVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(precision, scale, DecimalVector.TYPE_WIDTH * 8) + ,null, null), + 
DecimalVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.DECIMAL, new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(precision, scale, DecimalVector.TYPE_WIDTH * 8)); + } + } + return writer; + } + + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name, int byteWidth) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + FixedSizeBinaryVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeBinary(byteWidth) + ,null, null), + FixedSizeBinaryVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeBinary(byteWidth); + ((PromotableWriter)writer).getWriter(MinorType.FIXEDSIZEBINARY, arrowType); + } + } + return writer; + } + + + @Override + 
public VarBinaryWriter varBinary(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + VarBinaryVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.VARBINARY.getType() + ,null, null), + VarBinaryVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.VARBINARY); + } + } + return writer; + } + + + @Override + public VarCharWriter varChar(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + VarCharVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.VARCHAR.getType() + ,null, null), + VarCharVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.VARCHAR); + } + } + return writer; + } + + + @Override + public LargeVarCharWriter largeVarChar(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + 
LargeVarCharVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.LARGEVARCHAR.getType() + ,null, null), + LargeVarCharVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.LARGEVARCHAR); + } + } + return writer; + } + + + @Override + public LargeVarBinaryWriter largeVarBinary(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + LargeVarBinaryVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.LARGEVARBINARY.getType() + ,null, null), + LargeVarBinaryVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.LARGEVARBINARY); + } + } + return writer; + } + + + @Override + public BitWriter bit(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + BitVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.BIT.getType() + ,null, null), + BitVector.class); + writer = new 
PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.BIT); + } + } + return writer; + } + + +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeMicroHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeMicroHolderReaderImpl.java new file mode 100644 index 000000000000..7bebde503d44 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeMicroHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeMicroHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeMicroHolder holder; + public NullableTimeMicroHolderReaderImpl(NullableTimeMicroHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + 
public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMEMICRO; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeMicroHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeMicroHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + if (!isSet()) { + return null; + } + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeMilliHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeMilliHolderReaderImpl.java new file mode 100644 index 000000000000..c9730c554017 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeMilliHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeMilliHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeMilliHolder holder; + public NullableTimeMilliHolderReaderImpl(NullableTimeMilliHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMEMILLI; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeMilliHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeMilliHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + if (!isSet()) { + return null; + } + + return DateUtility.getLocalDateTimeFromEpochMilli(holder.value); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeNanoHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeNanoHolderReaderImpl.java new file mode 100644 index 000000000000..32a7cec6d7e6 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeNanoHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeNanoHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeNanoHolder holder; + public NullableTimeNanoHolderReaderImpl(NullableTimeNanoHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMENANO; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeNanoHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeNanoHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + if (!isSet()) { + return null; + } + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeSecHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeSecHolderReaderImpl.java new file mode 100644 index 000000000000..c4195655fa40 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeSecHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeSecHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeSecHolder holder; + public NullableTimeSecHolderReaderImpl(NullableTimeSecHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public 
boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESEC; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeSecHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeSecHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Integer readInteger() { + if (!isSet()) { + return null; + } + + Integer value = new Integer(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readInteger(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMicroHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMicroHolderReaderImpl.java new file mode 100644 index 000000000000..da8424f58cc3 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMicroHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeStampMicroHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeStampMicroHolder holder; + public NullableTimeStampMicroHolderReaderImpl(NullableTimeStampMicroHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPMICRO; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeStampMicroHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeStampMicroHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + if (!isSet()) { + return null; + } + + return DateUtility.getLocalDateTimeFromEpochMicro(holder.value); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMicroTZHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMicroTZHolderReaderImpl.java new file mode 100644 index 000000000000..18c4ef80f4c8 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMicroTZHolderReaderImpl.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeStampMicroTZHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeStampMicroTZHolder holder; + public NullableTimeStampMicroTZHolderReaderImpl(NullableTimeStampMicroTZHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPMICROTZ; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeStampMicroTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + } + + @Override + public void read(NullableTimeStampMicroTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + if (!isSet()) { + return null; + } + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMilliHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMilliHolderReaderImpl.java new file mode 100644 index 000000000000..007988e2de51 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMilliHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeStampMilliHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeStampMilliHolder holder; + public NullableTimeStampMilliHolderReaderImpl(NullableTimeStampMilliHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); 
+ } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPMILLI; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeStampMilliHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeStampMilliHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + if (!isSet()) { + return null; + } + + return DateUtility.getLocalDateTimeFromEpochMilli(holder.value); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMilliTZHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMilliTZHolderReaderImpl.java new file mode 100644 index 000000000000..98f4c944af02 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampMilliTZHolderReaderImpl.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeStampMilliTZHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeStampMilliTZHolder holder; + public NullableTimeStampMilliTZHolderReaderImpl(NullableTimeStampMilliTZHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPMILLITZ; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeStampMilliTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + } + + @Override + public void read(NullableTimeStampMilliTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + if (!isSet()) { + return null; + } + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampNanoHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampNanoHolderReaderImpl.java new file mode 100644 index 000000000000..45eda8f86c14 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampNanoHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeStampNanoHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeStampNanoHolder holder; + public NullableTimeStampNanoHolderReaderImpl(NullableTimeStampNanoHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPNANO; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeStampNanoHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeStampNanoHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + if (!isSet()) { + return null; + } + + return DateUtility.getLocalDateTimeFromEpochNano(holder.value); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampNanoTZHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampNanoTZHolderReaderImpl.java new file mode 100644 index 000000000000..b0f900f5e4fa --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampNanoTZHolderReaderImpl.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeStampNanoTZHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeStampNanoTZHolder holder; + public NullableTimeStampNanoTZHolderReaderImpl(NullableTimeStampNanoTZHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value 
reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPNANOTZ; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeStampNanoTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + } + + @Override + public void read(NullableTimeStampNanoTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + if (!isSet()) { + return null; + } + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampSecHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampSecHolderReaderImpl.java new file mode 100644 index 000000000000..0bd32db7fc48 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampSecHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeStampSecHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeStampSecHolder holder; + public NullableTimeStampSecHolderReaderImpl(NullableTimeStampSecHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPSEC; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeStampSecHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeStampSecHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + if (!isSet()) { + return null; + } + + final long millis = java.util.concurrent.TimeUnit.SECONDS.toMillis(holder.value); + return DateUtility.getLocalDateTimeFromEpochMilli(millis); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampSecTZHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampSecTZHolderReaderImpl.java new file mode 100644 index 000000000000..59d0b1de878c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTimeStampSecTZHolderReaderImpl.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTimeStampSecTZHolderReaderImpl extends AbstractFieldReader { + + private NullableTimeStampSecTZHolder holder; + public NullableTimeStampSecTZHolderReaderImpl(NullableTimeStampSecTZHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); 
+ } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPSECTZ; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TimeStampSecTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + } + + @Override + public void read(NullableTimeStampSecTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + if (!isSet()) { + return null; + } + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTinyIntHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTinyIntHolderReaderImpl.java new file mode 100644 index 000000000000..622fa7ec4cc4 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableTinyIntHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableTinyIntHolderReaderImpl extends AbstractFieldReader { + + private NullableTinyIntHolder holder; + public NullableTinyIntHolderReaderImpl(NullableTinyIntHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TINYINT; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(TinyIntHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTinyIntHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Byte readByte() { + if (!isSet()) { + return null; + } + + Byte value = new Byte(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readByte(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt1HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt1HolderReaderImpl.java new file mode 100644 index 000000000000..640cb810ce6a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt1HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableUInt1HolderReaderImpl extends AbstractFieldReader { + + private NullableUInt1Holder holder; + public NullableUInt1HolderReaderImpl(NullableUInt1Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.UINT1; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(UInt1Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableUInt1Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Byte readByte() { + if (!isSet()) { + return null; + } + + Byte value = new Byte(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readByte(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt2HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt2HolderReaderImpl.java new file mode 100644 index 000000000000..993ff42147bc --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt2HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableUInt2HolderReaderImpl extends AbstractFieldReader { + + private NullableUInt2Holder holder; + public NullableUInt2HolderReaderImpl(NullableUInt2Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.UINT2; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(UInt2Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableUInt2Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Character readCharacter() { + if (!isSet()) { + return null; + } + + Character value = new Character(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readCharacter(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt4HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt4HolderReaderImpl.java new file mode 100644 index 000000000000..c21ac49b6b63 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt4HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableUInt4HolderReaderImpl extends AbstractFieldReader { + + private NullableUInt4Holder holder; + public NullableUInt4HolderReaderImpl(NullableUInt4Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean 
next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.UINT4; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(UInt4Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableUInt4Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Integer readInteger() { + if (!isSet()) { + return null; + } + + Integer value = new Integer(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readInteger(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt8HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt8HolderReaderImpl.java new file mode 100644 index 000000000000..bce8fcdcc2ac --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableUInt8HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableUInt8HolderReaderImpl extends AbstractFieldReader { + + private NullableUInt8Holder holder; + public NullableUInt8HolderReaderImpl(NullableUInt8Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.UINT8; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(UInt8Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableUInt8Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + if (!isSet()) { + return null; + } + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableVarBinaryHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableVarBinaryHolderReaderImpl.java new file mode 100644 index 000000000000..af98e29133c6 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableVarBinaryHolderReaderImpl.java @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableVarBinaryHolderReaderImpl extends AbstractFieldReader { + + private NullableVarBinaryHolder holder; + public NullableVarBinaryHolderReaderImpl(NullableVarBinaryHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.VARBINARY; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(VarBinaryHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + } + + @Override + public void read(NullableVarBinaryHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public byte[] readByteArray() { + if (!isSet()) { + return null; + } + + int length = holder.end - holder.start; + byte[] value = new byte [length]; + holder.buffer.getBytes(holder.start, value, 0, length); + return value; + } + + @Override + public Object readObject() { + return readByteArray(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableVarCharHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableVarCharHolderReaderImpl.java new file mode 100644 index 000000000000..4faa73dc69f8 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/NullableVarCharHolderReaderImpl.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class NullableVarCharHolderReaderImpl extends AbstractFieldReader { + + private NullableVarCharHolder holder; + public NullableVarCharHolderReaderImpl(NullableVarCharHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public 
boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.VARCHAR; + } + + @Override + public boolean isSet() { + return this.holder.isSet == 1; + } + + @Override + public void read(VarCharHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + } + + @Override + public void read(NullableVarCharHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Text readText() { + if (!isSet()) { + return null; + } + + int length = holder.end - holder.start; + byte[] value = new byte [length]; + holder.buffer.getBytes(holder.start, value, 0, length); + Text text = new Text(); + text.set(value); + return text; + } + + @Override + public Object readObject() { + return readText(); + } + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SingleCaseSensitiveStructWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SingleCaseSensitiveStructWriter.java new file mode 100644 index 000000000000..127ef5583ee7 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SingleCaseSensitiveStructWriter.java @@ -0,0 +1,81 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker and the 
CaseSensitiveStructWriters.java template. + */ +@SuppressWarnings("unused") +public class SingleCaseSensitiveStructWriter extends SingleStructWriter { + public SingleCaseSensitiveStructWriter(NonNullableStructVector container) { + super(container); + } + + @Override + protected String handleCase(final String input){ + return input; + } + + @Override + protected NullableStructWriterFactory getNullableStructWriterFactory() { + return NullableStructWriterFactory.getNullableCaseSensitiveStructWriterFactoryInstance(); + } + +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SingleStructWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SingleStructWriter.java new file mode 100644 index 000000000000..739d29a9fb1a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SingleStructWriter.java @@ -0,0 +1,1643 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +import java.util.Map; +import java.util.HashMap; + +import org.apache.arrow.vector.holders.RepeatedStructHolder; +import org.apache.arrow.vector.AllocationHelper; +import org.apache.arrow.vector.complex.reader.FieldReader; +import org.apache.arrow.vector.complex.writer.FieldWriter; + + +/* + * This class is generated using FreeMarker and the StructWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class SingleStructWriter extends AbstractFieldWriter { + + protected final NonNullableStructVector container; + private int initialCapacity; + private final Map fields = new HashMap<>(); + public SingleStructWriter(NonNullableStructVector container) { + if (container instanceof StructVector) { + throw new IllegalArgumentException("Invalid container: " + container); + } + this.container = container; + this.initialCapacity = 0; + for (Field child : container.getField().getChildren()) { + MinorType minorType = Types.getMinorTypeForArrowType(child.getType()); + switch (minorType) { + case STRUCT: + struct(child.getName()); + break; + case LIST: + list(child.getName()); + break; + case MAP: { + ArrowType.Map arrowType = (ArrowType.Map) child.getType(); + map(child.getName(), arrowType.getKeysSorted()); + break; + } + case DENSEUNION: { + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.DENSEUNION.getType(), null, null); + DenseUnionWriter writer = new DenseUnionWriter(container.addOrGet(child.getName(), fieldType, DenseUnionVector.class), getNullableStructWriterFactory()); + fields.put(handleCase(child.getName()), writer); + break; + } + case UNION: + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.UNION.getType(), null, null); + UnionWriter writer = new UnionWriter(container.addOrGet(child.getName(), fieldType, UnionVector.class), getNullableStructWriterFactory()); + fields.put(handleCase(child.getName()), writer); + break; + case TINYINT: { + tinyInt(child.getName()); + break; + } + case UINT1: { + uInt1(child.getName()); + break; + } + case UINT2: { + uInt2(child.getName()); + break; + } + case SMALLINT: { + smallInt(child.getName()); + break; + } + case FLOAT2: { + float2(child.getName()); + break; + } + case INT: { + integer(child.getName()); + break; + } + case UINT4: { + uInt4(child.getName()); + break; + } + case FLOAT4: { + float4(child.getName()); + break; + } + case DATEDAY: { + 
dateDay(child.getName()); + break; + } + case INTERVALYEAR: { + intervalYear(child.getName()); + break; + } + case TIMESEC: { + timeSec(child.getName()); + break; + } + case TIMEMILLI: { + timeMilli(child.getName()); + break; + } + case BIGINT: { + bigInt(child.getName()); + break; + } + case UINT8: { + uInt8(child.getName()); + break; + } + case FLOAT8: { + float8(child.getName()); + break; + } + case DATEMILLI: { + dateMilli(child.getName()); + break; + } + case DURATION: { + org.apache.arrow.vector.types.pojo.ArrowType.Duration arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Duration)child.getType(); + duration(child.getName(), arrowType.getUnit()); + break; + } + case TIMESTAMPSEC: { + timeStampSec(child.getName()); + break; + } + case TIMESTAMPMILLI: { + timeStampMilli(child.getName()); + break; + } + case TIMESTAMPMICRO: { + timeStampMicro(child.getName()); + break; + } + case TIMESTAMPNANO: { + timeStampNano(child.getName()); + break; + } + case TIMESTAMPSECTZ: { + org.apache.arrow.vector.types.pojo.ArrowType.Timestamp arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Timestamp)child.getType(); + timeStampSecTZ(child.getName(), arrowType.getTimezone()); + break; + } + case TIMESTAMPMILLITZ: { + org.apache.arrow.vector.types.pojo.ArrowType.Timestamp arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Timestamp)child.getType(); + timeStampMilliTZ(child.getName(), arrowType.getTimezone()); + break; + } + case TIMESTAMPMICROTZ: { + org.apache.arrow.vector.types.pojo.ArrowType.Timestamp arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Timestamp)child.getType(); + timeStampMicroTZ(child.getName(), arrowType.getTimezone()); + break; + } + case TIMESTAMPNANOTZ: { + org.apache.arrow.vector.types.pojo.ArrowType.Timestamp arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Timestamp)child.getType(); + timeStampNanoTZ(child.getName(), arrowType.getTimezone()); + break; + } + case TIMEMICRO: { + timeMicro(child.getName()); + break; 
+ } + case TIMENANO: { + timeNano(child.getName()); + break; + } + case INTERVALDAY: { + intervalDay(child.getName()); + break; + } + case INTERVALMONTHDAYNANO: { + intervalMonthDayNano(child.getName()); + break; + } + case DECIMAL256: { + org.apache.arrow.vector.types.pojo.ArrowType.Decimal arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Decimal)child.getType(); + decimal256(child.getName(), arrowType.getScale(), arrowType.getPrecision()); + break; + } + case DECIMAL: { + org.apache.arrow.vector.types.pojo.ArrowType.Decimal arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.Decimal)child.getType(); + decimal(child.getName(), arrowType.getScale(), arrowType.getPrecision()); + break; + } + case FIXEDSIZEBINARY: { + org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeBinary arrowType = (org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeBinary)child.getType(); + fixedSizeBinary(child.getName(), arrowType.getByteWidth()); + break; + } + case VARBINARY: { + varBinary(child.getName()); + break; + } + case VARCHAR: { + varChar(child.getName()); + break; + } + case LARGEVARCHAR: { + largeVarChar(child.getName()); + break; + } + case LARGEVARBINARY: { + largeVarBinary(child.getName()); + break; + } + case BIT: { + bit(child.getName()); + break; + } + default: + throw new UnsupportedOperationException("Unknown type: " + minorType); + } + } + } + + protected String handleCase(final String input) { + return input.toLowerCase(); + } + + protected NullableStructWriterFactory getNullableStructWriterFactory() { + return NullableStructWriterFactory.getNullableStructWriterFactoryInstance(); + } + + @Override + public int getValueCapacity() { + return container.getValueCapacity(); + } + + public void setInitialCapacity(int initialCapacity) { + this.initialCapacity = initialCapacity; + container.setInitialCapacity(initialCapacity); + } + + @Override + public boolean isEmptyStruct() { + return 0 == container.size(); + } + + @Override + public Field getField() { + 
return container.getField(); + } + + @Override + public StructWriter struct(String name) { + String finalName = handleCase(name); + FieldWriter writer = fields.get(finalName); + if(writer == null){ + int vectorCount=container.size(); + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.STRUCT.getType(), null, null); + StructVector vector = container.addOrGet(name, fieldType, StructVector.class); + writer = new PromotableWriter(vector, container, getNullableStructWriterFactory()); + if(vectorCount != container.size()) { + writer.allocate(); + } + writer.setPosition(idx()); + fields.put(finalName, writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.STRUCT); + } + } + return writer; + } + + @Override + public void close() throws Exception { + clear(); + container.close(); + } + + @Override + public void allocate() { + container.allocateNew(); + for(final FieldWriter w : fields.values()) { + w.allocate(); + } + } + + @Override + public void clear() { + container.clear(); + for(final FieldWriter w : fields.values()) { + w.clear(); + } + } + + @Override + public ListWriter list(String name) { + String finalName = handleCase(name); + FieldWriter writer = fields.get(finalName); + int vectorCount = container.size(); + if(writer == null) { + FieldType fieldType = new FieldType(addVectorAsNullable, MinorType.LIST.getType(), null, null); + writer = new PromotableWriter(container.addOrGet(name, fieldType, ListVector.class), container, getNullableStructWriterFactory()); + if (container.size() > vectorCount) { + writer.allocate(); + } + writer.setPosition(idx()); + fields.put(finalName, writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.LIST); + } + } + return writer; + } + + @Override + public MapWriter map(String name) { + return map(name, false); + } + + @Override + 
public MapWriter map(String name, boolean keysSorted) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + MapVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new ArrowType.Map(keysSorted) + ,null, null), + MapVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.MAP, new ArrowType.Map(keysSorted)); + } + } + return writer; + } + + public void setValueCount(int count) { + container.setValueCount(count); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + for(final FieldWriter w: fields.values()) { + w.setPosition(index); + } + } + + + @Override + public void start() { + } + + @Override + public void end() { + setPosition(idx()+1); + } + + + @Override + public TinyIntWriter tinyInt(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TinyIntVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TINYINT.getType() + ,null, null), + TinyIntVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof 
PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TINYINT); + } + } + return writer; + } + + + @Override + public UInt1Writer uInt1(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + UInt1Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.UINT1.getType() + ,null, null), + UInt1Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.UINT1); + } + } + return writer; + } + + + @Override + public UInt2Writer uInt2(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + UInt2Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.UINT2.getType() + ,null, null), + UInt2Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.UINT2); + } + } + return writer; + } + + + @Override + public SmallIntWriter smallInt(String name) { + FieldWriter writer = 
fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + SmallIntVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.SMALLINT.getType() + ,null, null), + SmallIntVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.SMALLINT); + } + } + return writer; + } + + + @Override + public Float2Writer float2(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + Float2Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.FLOAT2.getType() + ,null, null), + Float2Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.FLOAT2); + } + } + return writer; + } + + + @Override + public IntWriter integer(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + IntVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + 
MinorType.INT.getType() + ,null, null), + IntVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.INT); + } + } + return writer; + } + + + @Override + public UInt4Writer uInt4(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + UInt4Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.UINT4.getType() + ,null, null), + UInt4Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.UINT4); + } + } + return writer; + } + + + @Override + public Float4Writer float4(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + Float4Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.FLOAT4.getType() + ,null, null), + Float4Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 
0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.FLOAT4); + } + } + return writer; + } + + + @Override + public DateDayWriter dateDay(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + DateDayVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.DATEDAY.getType() + ,null, null), + DateDayVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.DATEDAY); + } + } + return writer; + } + + + @Override + public IntervalYearWriter intervalYear(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + IntervalYearVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.INTERVALYEAR.getType() + ,null, null), + IntervalYearVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof 
PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.INTERVALYEAR); + } + } + return writer; + } + + + @Override + public TimeSecWriter timeSec(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeSecVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMESEC.getType() + ,null, null), + TimeSecVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMESEC); + } + } + return writer; + } + + + @Override + public TimeMilliWriter timeMilli(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeMilliVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMEMILLI.getType() + ,null, null), + TimeMilliVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMEMILLI); + } + } + return writer; + } + + + @Override + public BigIntWriter bigInt(String name) { 
+ FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + BigIntVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.BIGINT.getType() + ,null, null), + BigIntVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.BIGINT); + } + } + return writer; + } + + + @Override + public UInt8Writer uInt8(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + UInt8Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.UINT8.getType() + ,null, null), + UInt8Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.UINT8); + } + } + return writer; + } + + + @Override + public Float8Writer float8(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + Float8Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + 
MinorType.FLOAT8.getType() + ,null, null), + Float8Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.FLOAT8); + } + } + return writer; + } + + + @Override + public DateMilliWriter dateMilli(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + DateMilliVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.DATEMILLI.getType() + ,null, null), + DateMilliVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.DATEMILLI); + } + } + return writer; + } + + + @Override + public DurationWriter duration(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public DurationWriter duration(String name, org.apache.arrow.vector.types.TimeUnit unit) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + DurationVector v = 
container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Duration(unit) + ,null, null), + DurationVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.Duration(unit); + ((PromotableWriter)writer).getWriter(MinorType.DURATION, arrowType); + } + } + return writer; + } + + + @Override + public TimeStampSecWriter timeStampSec(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampSecVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMESTAMPSEC.getType() + ,null, null), + TimeStampSecVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPSEC); + } + } + return writer; + } + + + @Override + public TimeStampMilliWriter timeStampMilli(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampMilliVector v = container.addOrGet(name, + new 
FieldType(addVectorAsNullable, + MinorType.TIMESTAMPMILLI.getType() + ,null, null), + TimeStampMilliVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPMILLI); + } + } + return writer; + } + + + @Override + public TimeStampMicroWriter timeStampMicro(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampMicroVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMESTAMPMICRO.getType() + ,null, null), + TimeStampMicroVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPMICRO); + } + } + return writer; + } + + + @Override + public TimeStampNanoWriter timeStampNano(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampNanoVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMESTAMPNANO.getType() + ,null, null), + TimeStampNanoVector.class); + writer = new 
PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPNANO); + } + } + return writer; + } + + + @Override + public TimeStampSecTZWriter timeStampSecTZ(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public TimeStampSecTZWriter timeStampSecTZ(String name, String timezone) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampSecTZVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.SECOND, timezone) + ,null, null), + TimeStampSecTZVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.SECOND, timezone); + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPSECTZ, arrowType); + } + } + return writer; + } + + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name) { + // 
returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name, String timezone) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampMilliTZVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.MILLISECOND, timezone) + ,null, null), + TimeStampMilliTZVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.MILLISECOND, timezone); + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPMILLITZ, arrowType); + } + } + return writer; + } + + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ(String name, String timezone) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampMicroTZVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new 
org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.MICROSECOND, timezone) + ,null, null), + TimeStampMicroTZVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.MICROSECOND, timezone); + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPMICROTZ, arrowType); + } + } + return writer; + } + + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name, String timezone) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeStampNanoTZVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.NANOSECOND, timezone) + ,null, null), + TimeStampNanoTZVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure 
writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.Timestamp(org.apache.arrow.vector.types.TimeUnit.NANOSECOND, timezone); + ((PromotableWriter)writer).getWriter(MinorType.TIMESTAMPNANOTZ, arrowType); + } + } + return writer; + } + + + @Override + public TimeMicroWriter timeMicro(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeMicroVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMEMICRO.getType() + ,null, null), + TimeMicroVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.TIMEMICRO); + } + } + return writer; + } + + + @Override + public TimeNanoWriter timeNano(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + TimeNanoVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.TIMENANO.getType() + ,null, null), + TimeNanoVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + 
((PromotableWriter)writer).getWriter(MinorType.TIMENANO); + } + } + return writer; + } + + + @Override + public IntervalDayWriter intervalDay(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + IntervalDayVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.INTERVALDAY.getType() + ,null, null), + IntervalDayVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.INTERVALDAY); + } + } + return writer; + } + + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + IntervalMonthDayNanoVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.INTERVALMONTHDAYNANO.getType() + ,null, null), + IntervalMonthDayNanoVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.INTERVALMONTHDAYNANO); + } + } + return writer; + } + + + @Override + public 
Decimal256Writer decimal256(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public Decimal256Writer decimal256(String name, int scale, int precision) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + Decimal256Vector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(precision, scale, Decimal256Vector.TYPE_WIDTH * 8) + ,null, null), + Decimal256Vector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.DECIMAL256, new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(precision, scale, Decimal256Vector.TYPE_WIDTH * 8)); + } + } + return writer; + } + + + @Override + public DecimalWriter decimal(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public DecimalWriter decimal(String name, int scale, int precision) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + DecimalVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(precision, scale, DecimalVector.TYPE_WIDTH * 8) + ,null, null), + DecimalVector.class); + writer = new 
PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.DECIMAL, new org.apache.arrow.vector.types.pojo.ArrowType.Decimal(precision, scale, DecimalVector.TYPE_WIDTH * 8)); + } + } + return writer; + } + + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name) { + // returns existing writer + final FieldWriter writer = fields.get(handleCase(name)); + Preconditions.checkNotNull(writer); + return writer; + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name, int byteWidth) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + FixedSizeBinaryVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + new org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeBinary(byteWidth) + ,null, null), + FixedSizeBinaryVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ArrowType arrowType = new org.apache.arrow.vector.types.pojo.ArrowType.FixedSizeBinary(byteWidth); + ((PromotableWriter)writer).getWriter(MinorType.FIXEDSIZEBINARY, arrowType); + } + } + return writer; + } + + + @Override + public VarBinaryWriter varBinary(String 
name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + VarBinaryVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.VARBINARY.getType() + ,null, null), + VarBinaryVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.VARBINARY); + } + } + return writer; + } + + + @Override + public VarCharWriter varChar(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + VarCharVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.VARCHAR.getType() + ,null, null), + VarCharVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.VARCHAR); + } + } + return writer; + } + + + @Override + public LargeVarCharWriter largeVarChar(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + LargeVarCharVector v = 
container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.LARGEVARCHAR.getType() + ,null, null), + LargeVarCharVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.LARGEVARCHAR); + } + } + return writer; + } + + + @Override + public LargeVarBinaryWriter largeVarBinary(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + LargeVarBinaryVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.LARGEVARBINARY.getType() + ,null, null), + LargeVarBinaryVector.class); + writer = new PromotableWriter(v, container, getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.LARGEVARBINARY); + } + } + return writer; + } + + + @Override + public BitWriter bit(String name) { + FieldWriter writer = fields.get(handleCase(name)); + if(writer == null) { + ValueVector vector; + ValueVector currentVector = container.getChild(name); + BitVector v = container.addOrGet(name, + new FieldType(addVectorAsNullable, + MinorType.BIT.getType() + ,null, null), + BitVector.class); + writer = new PromotableWriter(v, container, 
getNullableStructWriterFactory()); + vector = v; + if (currentVector == null || currentVector != vector) { + if(this.initialCapacity > 0) { + vector.setInitialCapacity(this.initialCapacity); + } + vector.allocateNewSafe(); + } + writer.setPosition(idx()); + fields.put(handleCase(name), writer); + } else { + if (writer instanceof PromotableWriter) { + // ensure writers are initialized + ((PromotableWriter)writer).getWriter(MinorType.BIT); + } + } + return writer; + } + + +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SmallIntHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SmallIntHolderReaderImpl.java new file mode 100644 index 000000000000..af95dab4ac79 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SmallIntHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class SmallIntHolderReaderImpl extends AbstractFieldReader { + + private SmallIntHolder holder; + public SmallIntHolderReaderImpl(SmallIntHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new 
UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.SMALLINT; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(SmallIntHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableSmallIntHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Short readShort() { + + Short value = new Short(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readShort(); + } + + public void copyAsValue(SmallIntWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SmallIntReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SmallIntReaderImpl.java new file mode 100644 index 000000000000..a31522793043 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SmallIntReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class SmallIntReaderImpl extends AbstractFieldReader { + + private final SmallIntVector vector; + + public SmallIntReaderImpl(SmallIntVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return 
vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(SmallIntWriter writer){ + SmallIntWriterImpl impl = (SmallIntWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + SmallIntWriterImpl impl = (SmallIntWriterImpl) writer.smallInt(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableSmallIntHolder h){ + vector.get(idx(), h); + } + + public Short readShort(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SmallIntWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SmallIntWriterImpl.java new file mode 100644 index 000000000000..f3d0cf2fda45 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/SmallIntWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class SmallIntWriterImpl extends AbstractFieldWriter { + + final SmallIntVector vector; + + +public SmallIntWriterImpl(SmallIntVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(SmallIntHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableSmallIntHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeSmallInt(short value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMicroHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMicroHolderReaderImpl.java new file mode 100644 index 000000000000..42b4d072197c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMicroHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeMicroHolderReaderImpl extends AbstractFieldReader { + + private TimeMicroHolder holder; + public TimeMicroHolderReaderImpl(TimeMicroHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMEMICRO; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeMicroHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeMicroHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + + public void copyAsValue(TimeMicroWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMicroReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMicroReaderImpl.java new file mode 100644 index 000000000000..5f6fc3c2020b --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMicroReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeMicroReaderImpl extends AbstractFieldReader { + + private final TimeMicroVector vector; + + public TimeMicroReaderImpl(TimeMicroVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TimeMicroWriter writer){ + TimeMicroWriterImpl impl = (TimeMicroWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeMicroWriterImpl impl = (TimeMicroWriterImpl) writer.timeMicro(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeMicroHolder h){ + vector.get(idx(), h); + } + + public Long readLong(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMicroWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMicroWriterImpl.java new file mode 100644 index 000000000000..8f44664d4fea --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMicroWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class TimeMicroWriterImpl extends AbstractFieldWriter { + + final TimeMicroVector vector; + + +public TimeMicroWriterImpl(TimeMicroVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeMicroHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeMicroHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeMicro(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMilliHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMilliHolderReaderImpl.java new file mode 100644 index 000000000000..af187f4c0fdf --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMilliHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeMilliHolderReaderImpl extends AbstractFieldReader { + + private TimeMilliHolder holder; + public TimeMilliHolderReaderImpl(TimeMilliHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMEMILLI; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeMilliHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeMilliHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + + return DateUtility.getLocalDateTimeFromEpochMilli(holder.value); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + + public void copyAsValue(TimeMilliWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMilliReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMilliReaderImpl.java new file mode 100644 index 000000000000..dce05fc56b17 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMilliReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeMilliReaderImpl extends AbstractFieldReader { + + private final TimeMilliVector vector; + + public TimeMilliReaderImpl(TimeMilliVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + 
return !vector.isNull(idx()); + } + + public void copyAsValue(TimeMilliWriter writer){ + TimeMilliWriterImpl impl = (TimeMilliWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeMilliWriterImpl impl = (TimeMilliWriterImpl) writer.timeMilli(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeMilliHolder h){ + vector.get(idx(), h); + } + + public LocalDateTime readLocalDateTime(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMilliWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMilliWriterImpl.java new file mode 100644 index 000000000000..dd244dfed860 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeMilliWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class TimeMilliWriterImpl extends AbstractFieldWriter { + + final TimeMilliVector vector; + + +public TimeMilliWriterImpl(TimeMilliVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeMilliHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeMilliHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeMilli(int value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeNanoHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeNanoHolderReaderImpl.java new file mode 100644 index 000000000000..39e27fc8490b --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeNanoHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeNanoHolderReaderImpl extends AbstractFieldReader { + + private TimeNanoHolder holder; + public TimeNanoHolderReaderImpl(TimeNanoHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMENANO; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeNanoHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeNanoHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + + public void copyAsValue(TimeNanoWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeNanoReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeNanoReaderImpl.java new file mode 100644 index 000000000000..d954dfd403d1 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeNanoReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeNanoReaderImpl extends AbstractFieldReader { + + private final TimeNanoVector vector; + + public TimeNanoReaderImpl(TimeNanoVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TimeNanoWriter writer){ + TimeNanoWriterImpl impl = (TimeNanoWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeNanoWriterImpl impl = (TimeNanoWriterImpl) writer.timeNano(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeNanoHolder h){ + vector.get(idx(), h); + } + + public Long readLong(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeNanoWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeNanoWriterImpl.java new file mode 100644 index 000000000000..350d22b48b6b --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeNanoWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class TimeNanoWriterImpl extends AbstractFieldWriter { + + final TimeNanoVector vector; + + +public TimeNanoWriterImpl(TimeNanoVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeNanoHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeNanoHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeNano(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeSecHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeSecHolderReaderImpl.java new file mode 100644 index 000000000000..0b84240ff7fd --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeSecHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeSecHolderReaderImpl extends AbstractFieldReader { + + private TimeSecHolder holder; + public TimeSecHolderReaderImpl(TimeSecHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESEC; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeSecHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeSecHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Integer readInteger() { + + Integer value = new Integer(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readInteger(); + } + + public void copyAsValue(TimeSecWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeSecReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeSecReaderImpl.java new file mode 100644 index 000000000000..259dfeda5137 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeSecReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeSecReaderImpl extends AbstractFieldReader { + + private final TimeSecVector vector; + + public TimeSecReaderImpl(TimeSecVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TimeSecWriter writer){ + TimeSecWriterImpl impl = (TimeSecWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeSecWriterImpl impl = (TimeSecWriterImpl) writer.timeSec(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeSecHolder h){ + vector.get(idx(), h); + } + + public Integer readInteger(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeSecWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeSecWriterImpl.java new file mode 100644 index 000000000000..3e611d284837 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeSecWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class TimeSecWriterImpl extends AbstractFieldWriter { + + final TimeSecVector vector; + + +public TimeSecWriterImpl(TimeSecVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeSecHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeSecHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeSec(int value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroHolderReaderImpl.java new file mode 100644 index 000000000000..178a69a440cd --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeStampMicroHolderReaderImpl extends AbstractFieldReader { + + private TimeStampMicroHolder holder; + public TimeStampMicroHolderReaderImpl(TimeStampMicroHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPMICRO; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeStampMicroHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeStampMicroHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + + return DateUtility.getLocalDateTimeFromEpochMicro(holder.value); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + + public void copyAsValue(TimeStampMicroWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroReaderImpl.java new file mode 100644 index 000000000000..08678e0d1c13 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroReaderImpl.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeStampMicroReaderImpl extends AbstractFieldReader { + + private final TimeStampMicroVector vector; + + public TimeStampMicroReaderImpl(TimeStampMicroVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public 
boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TimeStampMicroWriter writer){ + TimeStampMicroWriterImpl impl = (TimeStampMicroWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeStampMicroWriterImpl impl = (TimeStampMicroWriterImpl) writer.timeStampMicro(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeStampMicroHolder h){ + vector.get(idx(), h); + } + + public LocalDateTime readLocalDateTime(){ + return vector.getObject(idx()); + } + + @Override + public Long readLong(){ + return vector.get(idx()); + } + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroTZHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroTZHolderReaderImpl.java new file mode 100644 index 000000000000..85f02b712e67 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroTZHolderReaderImpl.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeStampMicroTZHolderReaderImpl extends AbstractFieldReader { + + private TimeStampMicroTZHolder holder; + public TimeStampMicroTZHolderReaderImpl(TimeStampMicroTZHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw 
new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPMICROTZ; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeStampMicroTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + } + + @Override + public void read(NullableTimeStampMicroTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + + public void copyAsValue(TimeStampMicroTZWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroTZReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroTZReaderImpl.java new file mode 100644 index 000000000000..745d1eeae10b --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroTZReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template 
ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeStampMicroTZReaderImpl extends AbstractFieldReader { + + private final TimeStampMicroTZVector vector; + + public TimeStampMicroTZReaderImpl(TimeStampMicroTZVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TimeStampMicroTZWriter writer){ + TimeStampMicroTZWriterImpl impl = (TimeStampMicroTZWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeStampMicroTZWriterImpl impl = (TimeStampMicroTZWriterImpl) writer.timeStampMicroTZ(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeStampMicroTZHolder h){ + vector.get(idx(), h); + } + + public Long readLong(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroTZWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroTZWriterImpl.java new file mode 100644 index 000000000000..db3d94029e2e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroTZWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the 
ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class TimeStampMicroTZWriterImpl extends AbstractFieldWriter { + + final TimeStampMicroTZVector vector; + + +public TimeStampMicroTZWriterImpl(TimeStampMicroTZVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeStampMicroTZHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeStampMicroTZHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeStampMicroTZ(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroWriterImpl.java new file mode 100644 index 000000000000..8034d0e1cefa --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMicroWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the 
ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class TimeStampMicroWriterImpl extends AbstractFieldWriter { + + final TimeStampMicroVector vector; + + +public TimeStampMicroWriterImpl(TimeStampMicroVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeStampMicroHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeStampMicroHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeStampMicro(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliHolderReaderImpl.java new file mode 100644 index 000000000000..2397d9cee51a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeStampMilliHolderReaderImpl extends AbstractFieldReader { + + private TimeStampMilliHolder holder; + public TimeStampMilliHolderReaderImpl(TimeStampMilliHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPMILLI; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeStampMilliHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeStampMilliHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + + return DateUtility.getLocalDateTimeFromEpochMilli(holder.value); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + + public void copyAsValue(TimeStampMilliWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliReaderImpl.java new file mode 100644 index 000000000000..884bad2c6338 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliReaderImpl.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeStampMilliReaderImpl extends AbstractFieldReader { + + private final TimeStampMilliVector vector; + + public TimeStampMilliReaderImpl(TimeStampMilliVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TimeStampMilliWriter writer){ + TimeStampMilliWriterImpl impl = (TimeStampMilliWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeStampMilliWriterImpl impl = (TimeStampMilliWriterImpl) writer.timeStampMilli(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeStampMilliHolder h){ + vector.get(idx(), h); + } + + public LocalDateTime readLocalDateTime(){ + return vector.getObject(idx()); + } + + @Override + public Long readLong(){ + return vector.get(idx()); + } + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliTZHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliTZHolderReaderImpl.java new file mode 100644 index 000000000000..5a79b5dcc0ca --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliTZHolderReaderImpl.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import 
java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeStampMilliTZHolderReaderImpl extends AbstractFieldReader { + + private TimeStampMilliTZHolder holder; + public TimeStampMilliTZHolderReaderImpl(TimeStampMilliTZHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPMILLITZ; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeStampMilliTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + } + + @Override + public void read(NullableTimeStampMilliTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + + public void copyAsValue(TimeStampMilliTZWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliTZReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliTZReaderImpl.java new file mode 100644 index 000000000000..41b464ff190c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliTZReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeStampMilliTZReaderImpl extends AbstractFieldReader { + + private final TimeStampMilliTZVector vector; + + public TimeStampMilliTZReaderImpl(TimeStampMilliTZVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + 
public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TimeStampMilliTZWriter writer){ + TimeStampMilliTZWriterImpl impl = (TimeStampMilliTZWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeStampMilliTZWriterImpl impl = (TimeStampMilliTZWriterImpl) writer.timeStampMilliTZ(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeStampMilliTZHolder h){ + vector.get(idx(), h); + } + + public Long readLong(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliTZWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliTZWriterImpl.java new file mode 100644 index 000000000000..812aca0024a4 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliTZWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class TimeStampMilliTZWriterImpl extends AbstractFieldWriter { + + final TimeStampMilliTZVector vector; + + +public TimeStampMilliTZWriterImpl(TimeStampMilliTZVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeStampMilliTZHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeStampMilliTZHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeStampMilliTZ(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliWriterImpl.java new file mode 100644 index 000000000000..f68f223122cb --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampMilliWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the 
ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class TimeStampMilliWriterImpl extends AbstractFieldWriter { + + final TimeStampMilliVector vector; + + +public TimeStampMilliWriterImpl(TimeStampMilliVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeStampMilliHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeStampMilliHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeStampMilli(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoHolderReaderImpl.java new file mode 100644 index 000000000000..57d712791d1a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoHolderReaderImpl.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeStampNanoHolderReaderImpl extends AbstractFieldReader { + + private TimeStampNanoHolder holder; + public TimeStampNanoHolderReaderImpl(TimeStampNanoHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPNANO; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeStampNanoHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeStampNanoHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + + return DateUtility.getLocalDateTimeFromEpochNano(holder.value); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + + public void copyAsValue(TimeStampNanoWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoReaderImpl.java new file mode 100644 index 000000000000..71529bbaf9f5 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoReaderImpl.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeStampNanoReaderImpl extends AbstractFieldReader { + + private final TimeStampNanoVector vector; + + public TimeStampNanoReaderImpl(TimeStampNanoVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TimeStampNanoWriter writer){ + TimeStampNanoWriterImpl impl = (TimeStampNanoWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeStampNanoWriterImpl impl = (TimeStampNanoWriterImpl) writer.timeStampNano(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeStampNanoHolder h){ + vector.get(idx(), h); + } + + public LocalDateTime readLocalDateTime(){ + return vector.getObject(idx()); + } + + @Override + public Long readLong(){ + return vector.get(idx()); + } + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoTZHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoTZHolderReaderImpl.java new file mode 100644 index 000000000000..840cf2d736e6 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoTZHolderReaderImpl.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import 
java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeStampNanoTZHolderReaderImpl extends AbstractFieldReader { + + private TimeStampNanoTZHolder holder; + public TimeStampNanoTZHolderReaderImpl(TimeStampNanoTZHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPNANOTZ; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeStampNanoTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + } + + @Override + public void read(NullableTimeStampNanoTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + + public void copyAsValue(TimeStampNanoTZWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoTZReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoTZReaderImpl.java new file mode 100644 index 000000000000..8624fb27bf7f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoTZReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeStampNanoTZReaderImpl extends AbstractFieldReader { + + private final TimeStampNanoTZVector vector; + + public TimeStampNanoTZReaderImpl(TimeStampNanoTZVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + 
public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TimeStampNanoTZWriter writer){ + TimeStampNanoTZWriterImpl impl = (TimeStampNanoTZWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeStampNanoTZWriterImpl impl = (TimeStampNanoTZWriterImpl) writer.timeStampNanoTZ(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeStampNanoTZHolder h){ + vector.get(idx(), h); + } + + public Long readLong(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoTZWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoTZWriterImpl.java new file mode 100644 index 000000000000..c53860dece20 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoTZWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class TimeStampNanoTZWriterImpl extends AbstractFieldWriter { + + final TimeStampNanoTZVector vector; + + +public TimeStampNanoTZWriterImpl(TimeStampNanoTZVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeStampNanoTZHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeStampNanoTZHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeStampNanoTZ(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoWriterImpl.java new file mode 100644 index 000000000000..45fe1de8b6f1 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampNanoWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the 
ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class TimeStampNanoWriterImpl extends AbstractFieldWriter { + + final TimeStampNanoVector vector; + + +public TimeStampNanoWriterImpl(TimeStampNanoVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeStampNanoHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeStampNanoHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeStampNano(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecHolderReaderImpl.java new file mode 100644 index 000000000000..2b5a6338849a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeStampSecHolderReaderImpl extends AbstractFieldReader { + + private TimeStampSecHolder holder; + public TimeStampSecHolderReaderImpl(TimeStampSecHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPSEC; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeStampSecHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTimeStampSecHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public LocalDateTime readLocalDateTime() { + + final long millis = java.util.concurrent.TimeUnit.SECONDS.toMillis(holder.value); + return DateUtility.getLocalDateTimeFromEpochMilli(millis); + } + + @Override + public Object readObject() { + return readLocalDateTime(); + } + + public void copyAsValue(TimeStampSecWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecReaderImpl.java new file mode 100644 index 000000000000..446dae47e436 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecReaderImpl.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeStampSecReaderImpl extends AbstractFieldReader { + + private final TimeStampSecVector vector; + + public TimeStampSecReaderImpl(TimeStampSecVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TimeStampSecWriter writer){ + TimeStampSecWriterImpl impl = (TimeStampSecWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeStampSecWriterImpl impl = (TimeStampSecWriterImpl) writer.timeStampSec(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeStampSecHolder h){ + vector.get(idx(), h); + } + + public LocalDateTime readLocalDateTime(){ + return vector.getObject(idx()); + } + + @Override + public Long readLong(){ + return vector.get(idx()); + } + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecTZHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecTZHolderReaderImpl.java new file mode 100644 index 000000000000..3d982e9da800 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecTZHolderReaderImpl.java @@ -0,0 +1,129 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or 
more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import 
java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TimeStampSecTZHolderReaderImpl extends AbstractFieldReader { + + private TimeStampSecTZHolder holder; + public TimeStampSecTZHolderReaderImpl(TimeStampSecTZHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TIMESTAMPSECTZ; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TimeStampSecTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + } + + @Override + public void read(NullableTimeStampSecTZHolder h) { + h.value = holder.value; + h.timezone = holder.timezone; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + + public void copyAsValue(TimeStampSecTZWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecTZReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecTZReaderImpl.java new file mode 100644 index 000000000000..90ee01cf76b3 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecTZReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TimeStampSecTZReaderImpl extends AbstractFieldReader { + + private final TimeStampSecTZVector vector; + + public TimeStampSecTZReaderImpl(TimeStampSecTZVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public 
boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TimeStampSecTZWriter writer){ + TimeStampSecTZWriterImpl impl = (TimeStampSecTZWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TimeStampSecTZWriterImpl impl = (TimeStampSecTZWriterImpl) writer.timeStampSecTZ(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTimeStampSecTZHolder h){ + vector.get(idx(), h); + } + + public Long readLong(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecTZWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecTZWriterImpl.java new file mode 100644 index 000000000000..d97812a2e141 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecTZWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class TimeStampSecTZWriterImpl extends AbstractFieldWriter { + + final TimeStampSecTZVector vector; + + +public TimeStampSecTZWriterImpl(TimeStampSecTZVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeStampSecTZHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeStampSecTZHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeStampSecTZ(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecWriterImpl.java new file mode 100644 index 000000000000..5a6c0e1375cc --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TimeStampSecWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the 
ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class TimeStampSecWriterImpl extends AbstractFieldWriter { + + final TimeStampSecVector vector; + + +public TimeStampSecWriterImpl(TimeStampSecVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TimeStampSecHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTimeStampSecHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTimeStampSec(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TinyIntHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TinyIntHolderReaderImpl.java new file mode 100644 index 000000000000..7d0b3ad5a2b8 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TinyIntHolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template 
HolderReaderImpl.java + +@SuppressWarnings("unused") +public class TinyIntHolderReaderImpl extends AbstractFieldReader { + + private TinyIntHolder holder; + public TinyIntHolderReaderImpl(TinyIntHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.TINYINT; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(TinyIntHolder h) { + h.value = holder.value; + } + + @Override + public void read(NullableTinyIntHolder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Byte readByte() { + + Byte value = new Byte(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readByte(); + } + + public void copyAsValue(TinyIntWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TinyIntReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TinyIntReaderImpl.java new file mode 100644 index 000000000000..3bca259d6f6d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TinyIntReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class TinyIntReaderImpl extends AbstractFieldReader { + + private final TinyIntVector vector; + + public TinyIntReaderImpl(TinyIntVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(TinyIntWriter writer){ + TinyIntWriterImpl impl = (TinyIntWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + TinyIntWriterImpl impl = (TinyIntWriterImpl) writer.tinyInt(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableTinyIntHolder h){ + vector.get(idx(), h); + } + + public Byte readByte(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TinyIntWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TinyIntWriterImpl.java new file mode 100644 index 000000000000..95361eb863a5 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/TinyIntWriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class TinyIntWriterImpl extends AbstractFieldWriter { + + final TinyIntVector vector; + + +public TinyIntWriterImpl(TinyIntVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(TinyIntHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableTinyIntHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeTinyInt(byte value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt1HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt1HolderReaderImpl.java new file mode 100644 index 000000000000..2d377f48e85c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt1HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class UInt1HolderReaderImpl extends AbstractFieldReader { + + private UInt1Holder holder; + public UInt1HolderReaderImpl(UInt1Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.UINT1; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(UInt1Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableUInt1Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Byte readByte() { + + Byte value = new Byte(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readByte(); + } + + public void copyAsValue(UInt1Writer writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt1ReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt1ReaderImpl.java new file mode 100644 index 000000000000..e1a00700b8f1 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt1ReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class UInt1ReaderImpl extends AbstractFieldReader { + + private final UInt1Vector vector; + + public UInt1ReaderImpl(UInt1Vector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(UInt1Writer writer){ + UInt1WriterImpl impl = (UInt1WriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + UInt1WriterImpl impl = (UInt1WriterImpl) writer.uInt1(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableUInt1Holder h){ + vector.get(idx(), h); + } + + public Byte readByte(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt1WriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt1WriterImpl.java new file mode 100644 index 000000000000..f4ee29e28dfd --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt1WriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class UInt1WriterImpl extends AbstractFieldWriter { + + final UInt1Vector vector; + + +public UInt1WriterImpl(UInt1Vector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(UInt1Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableUInt1Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeUInt1(byte value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt2HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt2HolderReaderImpl.java new file mode 100644 index 000000000000..fed9240ccdc0 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt2HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class UInt2HolderReaderImpl extends AbstractFieldReader { + + private UInt2Holder holder; + public UInt2HolderReaderImpl(UInt2Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.UINT2; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(UInt2Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableUInt2Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Character readCharacter() { + + Character value = new Character(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readCharacter(); + } + + public void copyAsValue(UInt2Writer writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt2ReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt2ReaderImpl.java new file mode 100644 index 000000000000..34d847241909 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt2ReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class UInt2ReaderImpl extends AbstractFieldReader { + + private final UInt2Vector vector; + + public UInt2ReaderImpl(UInt2Vector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(UInt2Writer writer){ + UInt2WriterImpl impl = (UInt2WriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + UInt2WriterImpl impl = (UInt2WriterImpl) writer.uInt2(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableUInt2Holder h){ + vector.get(idx(), h); + } + + public Character readCharacter(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt2WriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt2WriterImpl.java new file mode 100644 index 000000000000..8f445b311635 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt2WriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class UInt2WriterImpl extends AbstractFieldWriter { + + final UInt2Vector vector; + + +public UInt2WriterImpl(UInt2Vector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(UInt2Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableUInt2Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeUInt2(char value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt4HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt4HolderReaderImpl.java new file mode 100644 index 000000000000..e06d9febd9e8 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt4HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class UInt4HolderReaderImpl extends AbstractFieldReader { + + private UInt4Holder holder; + public UInt4HolderReaderImpl(UInt4Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.UINT4; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(UInt4Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableUInt4Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Integer readInteger() { + + Integer value = new Integer(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readInteger(); + } + + public void copyAsValue(UInt4Writer writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt4ReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt4ReaderImpl.java new file mode 100644 index 000000000000..8028f973b8c5 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt4ReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class UInt4ReaderImpl extends AbstractFieldReader { + + private final UInt4Vector vector; + + public UInt4ReaderImpl(UInt4Vector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(UInt4Writer writer){ + UInt4WriterImpl impl = (UInt4WriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + UInt4WriterImpl impl = (UInt4WriterImpl) writer.uInt4(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableUInt4Holder h){ + vector.get(idx(), h); + } + + public Integer readInteger(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt4WriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt4WriterImpl.java new file mode 100644 index 000000000000..47d773c4312e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt4WriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class UInt4WriterImpl extends AbstractFieldWriter { + + final UInt4Vector vector; + + +public UInt4WriterImpl(UInt4Vector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(UInt4Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableUInt4Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeUInt4(int value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt8HolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt8HolderReaderImpl.java new file mode 100644 index 000000000000..6808fee2deb0 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt8HolderReaderImpl.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class UInt8HolderReaderImpl extends AbstractFieldReader { + + private UInt8Holder holder; + public UInt8HolderReaderImpl(UInt8Holder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.UINT8; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(UInt8Holder h) { + h.value = holder.value; + } + + @Override + public void read(NullableUInt8Holder h) { + h.value = holder.value; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public Long readLong() { + + Long value = new Long(this.holder.value); + return value; + } + + @Override + public Object readObject() { + return readLong(); + } + + public void copyAsValue(UInt8Writer writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt8ReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt8ReaderImpl.java new file mode 100644 index 000000000000..c5540f0a91e1 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt8ReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; 
+import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class UInt8ReaderImpl extends AbstractFieldReader { + + private final UInt8Vector vector; + + public UInt8ReaderImpl(UInt8Vector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(UInt8Writer writer){ + UInt8WriterImpl impl = (UInt8WriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + UInt8WriterImpl impl = (UInt8WriterImpl) writer.uInt8(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableUInt8Holder h){ + vector.get(idx(), h); + } + + public Long readLong(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt8WriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt8WriterImpl.java new file mode 100644 index 000000000000..d5f1e2b28ab2 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UInt8WriterImpl.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class UInt8WriterImpl extends AbstractFieldWriter { + + final UInt8Vector vector; + + +public UInt8WriterImpl(UInt8Vector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(UInt8Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableUInt8Holder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeUInt8(long value) { + vector.setSafe(idx(), 1, value); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionFixedSizeListWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionFixedSizeListWriter.java new file mode 100644 index 000000000000..c241fae98993 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionFixedSizeListWriter.java @@ -0,0 +1,1254 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import 
java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/* + * This class is generated using freemarker and the UnionFixedSizeListWriter.java template. + */ + +@SuppressWarnings("unused") +public class UnionFixedSizeListWriter extends AbstractFieldWriter { + + protected FixedSizeListVector vector; + protected PromotableWriter writer; + private boolean inStruct = false; + private String structName; + private final int listSize; + + public UnionFixedSizeListWriter(FixedSizeListVector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public UnionFixedSizeListWriter(FixedSizeListVector vector, NullableStructWriterFactory nullableStructWriterFactory) { + this.vector = vector; + this.writer = new PromotableWriter(vector.getDataVector(), vector, nullableStructWriterFactory); + this.listSize = vector.getListSize(); + } + + public UnionFixedSizeListWriter(FixedSizeListVector vector, AbstractFieldWriter parent) { + this(vector); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + public Field getField() { + return vector.getField(); + } + + public void setValueCount(int count) { + vector.setValueCount(count); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void close() throws Exception { + vector.close(); + writer.close(); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + } + + @Override + public TinyIntWriter tinyInt() { + return this; + } + + @Override + public TinyIntWriter tinyInt(String name) { + structName = name; + return writer.tinyInt(name); + } + + @Override + public UInt1Writer uInt1() { + return this; + } + + @Override + public UInt1Writer uInt1(String name) { + structName = name; + return writer.uInt1(name); + } + + @Override + public UInt2Writer 
uInt2() { + return this; + } + + @Override + public UInt2Writer uInt2(String name) { + structName = name; + return writer.uInt2(name); + } + + @Override + public SmallIntWriter smallInt() { + return this; + } + + @Override + public SmallIntWriter smallInt(String name) { + structName = name; + return writer.smallInt(name); + } + + @Override + public Float2Writer float2() { + return this; + } + + @Override + public Float2Writer float2(String name) { + structName = name; + return writer.float2(name); + } + + @Override + public IntWriter integer() { + return this; + } + + @Override + public IntWriter integer(String name) { + structName = name; + return writer.integer(name); + } + + @Override + public UInt4Writer uInt4() { + return this; + } + + @Override + public UInt4Writer uInt4(String name) { + structName = name; + return writer.uInt4(name); + } + + @Override + public Float4Writer float4() { + return this; + } + + @Override + public Float4Writer float4(String name) { + structName = name; + return writer.float4(name); + } + + @Override + public DateDayWriter dateDay() { + return this; + } + + @Override + public DateDayWriter dateDay(String name) { + structName = name; + return writer.dateDay(name); + } + + @Override + public IntervalYearWriter intervalYear() { + return this; + } + + @Override + public IntervalYearWriter intervalYear(String name) { + structName = name; + return writer.intervalYear(name); + } + + @Override + public TimeSecWriter timeSec() { + return this; + } + + @Override + public TimeSecWriter timeSec(String name) { + structName = name; + return writer.timeSec(name); + } + + @Override + public TimeMilliWriter timeMilli() { + return this; + } + + @Override + public TimeMilliWriter timeMilli(String name) { + structName = name; + return writer.timeMilli(name); + } + + @Override + public BigIntWriter bigInt() { + return this; + } + + @Override + public BigIntWriter bigInt(String name) { + structName = name; + return writer.bigInt(name); + } + + @Override 
+ public UInt8Writer uInt8() { + return this; + } + + @Override + public UInt8Writer uInt8(String name) { + structName = name; + return writer.uInt8(name); + } + + @Override + public Float8Writer float8() { + return this; + } + + @Override + public Float8Writer float8(String name) { + structName = name; + return writer.float8(name); + } + + @Override + public DateMilliWriter dateMilli() { + return this; + } + + @Override + public DateMilliWriter dateMilli(String name) { + structName = name; + return writer.dateMilli(name); + } + + @Override + public TimeStampSecWriter timeStampSec() { + return this; + } + + @Override + public TimeStampSecWriter timeStampSec(String name) { + structName = name; + return writer.timeStampSec(name); + } + + @Override + public TimeStampMilliWriter timeStampMilli() { + return this; + } + + @Override + public TimeStampMilliWriter timeStampMilli(String name) { + structName = name; + return writer.timeStampMilli(name); + } + + @Override + public TimeStampMicroWriter timeStampMicro() { + return this; + } + + @Override + public TimeStampMicroWriter timeStampMicro(String name) { + structName = name; + return writer.timeStampMicro(name); + } + + @Override + public TimeStampNanoWriter timeStampNano() { + return this; + } + + @Override + public TimeStampNanoWriter timeStampNano(String name) { + structName = name; + return writer.timeStampNano(name); + } + + @Override + public TimeMicroWriter timeMicro() { + return this; + } + + @Override + public TimeMicroWriter timeMicro(String name) { + structName = name; + return writer.timeMicro(name); + } + + @Override + public TimeNanoWriter timeNano() { + return this; + } + + @Override + public TimeNanoWriter timeNano(String name) { + structName = name; + return writer.timeNano(name); + } + + @Override + public IntervalDayWriter intervalDay() { + return this; + } + + @Override + public IntervalDayWriter intervalDay(String name) { + structName = name; + return writer.intervalDay(name); + } + + @Override + 
public IntervalMonthDayNanoWriter intervalMonthDayNano() { + return this; + } + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano(String name) { + structName = name; + return writer.intervalMonthDayNano(name); + } + + @Override + public VarBinaryWriter varBinary() { + return this; + } + + @Override + public VarBinaryWriter varBinary(String name) { + structName = name; + return writer.varBinary(name); + } + + @Override + public VarCharWriter varChar() { + return this; + } + + @Override + public VarCharWriter varChar(String name) { + structName = name; + return writer.varChar(name); + } + + @Override + public LargeVarCharWriter largeVarChar() { + return this; + } + + @Override + public LargeVarCharWriter largeVarChar(String name) { + structName = name; + return writer.largeVarChar(name); + } + + @Override + public LargeVarBinaryWriter largeVarBinary() { + return this; + } + + @Override + public LargeVarBinaryWriter largeVarBinary(String name) { + structName = name; + return writer.largeVarBinary(name); + } + + @Override + public BitWriter bit() { + return this; + } + + @Override + public BitWriter bit(String name) { + structName = name; + return writer.bit(name); + } + + @Override + public DecimalWriter decimal() { + return this; + } + + @Override + public DecimalWriter decimal(String name, int scale, int precision) { + return writer.decimal(name, scale, precision); + } + + @Override + public DecimalWriter decimal(String name) { + return writer.decimal(name); + } + + + @Override + public Decimal256Writer decimal256() { + return this; + } + + @Override + public Decimal256Writer decimal256(String name, int scale, int precision) { + return writer.decimal256(name, scale, precision); + } + + @Override + public Decimal256Writer decimal256(String name) { + return writer.decimal256(name); + } + + @Override + public StructWriter struct() { + inStruct = true; + return this; + } + + @Override + public ListWriter list() { + return writer; + } + + @Override + 
public ListWriter list(String name) { + ListWriter listWriter = writer.list(name); + return listWriter; + } + + @Override + public StructWriter struct(String name) { + StructWriter structWriter = writer.struct(name); + return structWriter; + } + + @Override + public MapWriter map() { + return writer; + } + + @Override + public MapWriter map(String name) { + MapWriter mapWriter = writer.map(name); + return mapWriter; + } + + @Override + public MapWriter map(boolean keysSorted) { + writer.map(keysSorted); + return writer; + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + MapWriter mapWriter = writer.map(name, keysSorted); + return mapWriter; + } + + @Override + public void startList() { + int start = vector.startNewValue(idx()); + writer.setPosition(start); + } + + @Override + public void endList() { + setPosition(idx() + 1); + } + + @Override + public void start() { + writer.start(); + } + + @Override + public void end() { + writer.end(); + inStruct = false; + } + + @Override + public void write(DecimalHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write(holder); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void write(Decimal256Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.write(holder); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeNull() { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeNull(); + } + + public void writeDecimal(long start, ArrowBuf buffer, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new 
IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal(start, buffer, arrowType); + writer.setPosition(writer.idx() + 1); + } + + public void writeDecimal(BigDecimal value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal(value); + writer.setPosition(writer.idx() + 1); + } + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeBigEndianBytesToDecimal(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + + public void writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal256(start, buffer, arrowType); + writer.setPosition(writer.idx() + 1); + } + + public void writeDecimal256(BigDecimal value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDecimal256(value); + writer.setPosition(writer.idx() + 1); + } + + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeBigEndianBytesToDecimal256(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + + + + @Override + public void writeTinyInt(byte value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new 
IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTinyInt(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(TinyIntHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTinyInt(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeUInt1(byte value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeUInt1(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(UInt1Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeUInt1(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeUInt2(char value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeUInt2(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(UInt2Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeUInt2(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeSmallInt(short value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeSmallInt(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(SmallIntHolder 
holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeSmallInt(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeFloat2(short value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeFloat2(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(Float2Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeFloat2(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeInt(int value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeInt(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(IntHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeInt(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeUInt4(int value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeUInt4(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(UInt4Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeUInt4(holder.value); + 
writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeFloat4(float value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeFloat4(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(Float4Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeFloat4(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeDateDay(int value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDateDay(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(DateDayHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDateDay(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeIntervalYear(int value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeIntervalYear(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(IntervalYearHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeIntervalYear(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeTimeSec(int value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new 
IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeSec(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(TimeSecHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeSec(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeTimeMilli(int value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeMilli(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(TimeMilliHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeMilli(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeBigInt(long value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeBigInt(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(BigIntHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeBigInt(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeUInt8(long value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeUInt8(value); + writer.setPosition(writer.idx() + 1); + } + + public void 
write(UInt8Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeUInt8(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeFloat8(double value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeFloat8(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(Float8Holder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeFloat8(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeDateMilli(long value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDateMilli(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(DateMilliHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeDateMilli(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + + @Override + public void writeTimeStampSec(long value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeStampSec(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(TimeStampSecHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), 
listSize)); + } + writer.writeTimeStampSec(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeTimeStampMilli(long value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeStampMilli(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(TimeStampMilliHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeStampMilli(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeTimeStampMicro(long value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeStampMicro(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(TimeStampMicroHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeStampMicro(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeTimeStampNano(long value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeStampNano(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(TimeStampNanoHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeStampNano(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + + + + + @Override + 
public void writeTimeMicro(long value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeMicro(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(TimeMicroHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeMicro(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeTimeNano(long value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeNano(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(TimeNanoHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeTimeNano(holder.value); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeIntervalDay(int days, int milliseconds) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeIntervalDay(days, milliseconds); + writer.setPosition(writer.idx() + 1); + } + + public void write(IntervalDayHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeIntervalDay(holder.days, holder.milliseconds); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeIntervalMonthDayNano(int months, int days, long nanoseconds) { + if (writer.idx() >= (idx() + 1) * listSize) { + 
throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeIntervalMonthDayNano(months, days, nanoseconds); + writer.setPosition(writer.idx() + 1); + } + + public void write(IntervalMonthDayNanoHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeIntervalMonthDayNano(holder.months, holder.days, holder.nanoseconds); + writer.setPosition(writer.idx() + 1); + } + + + + + @Override + public void writeVarBinary(byte[] value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarBinary(byte[] value, int offset, int length) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarBinary(ByteBuffer value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarBinary(ByteBuffer value, int offset, int length) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarBinary(int start, int end, ArrowBuf buffer) { + if (writer.idx() >= 
(idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeVarBinary(start, end, buffer); + writer.setPosition(writer.idx() + 1); + } + + public void write(VarBinaryHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeVarBinary(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarChar(Text value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarChar(String value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarChar(int start, int end, ArrowBuf buffer) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeVarChar(start, end, buffer); + writer.setPosition(writer.idx() + 1); + } + + public void write(VarCharHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeVarChar(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarChar(Text value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is 
greater than listSize %s", idx(), listSize)); + } + writer.writeLargeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarChar(String value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeLargeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarChar(long start, long end, ArrowBuf buffer) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeLargeVarChar(start, end, buffer); + writer.setPosition(writer.idx() + 1); + } + + public void write(LargeVarCharHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeLargeVarChar(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(byte[] value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeLargeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(byte[] value, int offset, int length) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeLargeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize 
%s", idx(), listSize)); + } + writer.writeLargeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value, int offset, int length) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeLargeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(long start, long end, ArrowBuf buffer) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeLargeVarBinary(start, end, buffer); + writer.setPosition(writer.idx() + 1); + } + + public void write(LargeVarBinaryHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeLargeVarBinary(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeBit(int value) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeBit(value); + writer.setPosition(writer.idx() + 1); + } + + public void write(BitHolder holder) { + if (writer.idx() >= (idx() + 1) * listSize) { + throw new IllegalStateException(String.format("values at index %s is greater than listSize %s", idx(), listSize)); + } + writer.writeBit(holder.value); + writer.setPosition(writer.idx() + 1); + } + +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionLargeListWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionLargeListWriter.java new file mode 100644 index 
000000000000..7d732834b743 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionLargeListWriter.java @@ -0,0 +1,1267 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + +import static org.apache.arrow.memory.util.LargeMemoryUtil.checkedCastToInt; + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import 
org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using freemarker and the UnionListWriter.java template. + */ + +@SuppressWarnings("unused") +public class UnionLargeListWriter extends AbstractFieldWriter { + + protected LargeListVector vector; + protected PromotableWriter writer; + private boolean inStruct = false; + private boolean listStarted = false; + private String structName; + private static final long OFFSET_WIDTH = 8; + + public UnionLargeListWriter(LargeListVector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public UnionLargeListWriter(LargeListVector vector, NullableStructWriterFactory nullableStructWriterFactory) { + this.vector = vector; + this.writer = new PromotableWriter(vector.getDataVector(), vector, nullableStructWriterFactory); + } + + public UnionLargeListWriter(LargeListVector vector, AbstractFieldWriter parent) { + this(vector); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + public Field getField() { + return vector.getField(); + } + + public void setValueCount(int count) { + vector.setValueCount(count); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void close() throws Exception { + vector.close(); + writer.close(); + } + + @Override + public void setPosition(int 
index) { + super.setPosition(index); + } + + @Override + public TinyIntWriter tinyInt() { + return this; + } + + + @Override + public TinyIntWriter tinyInt(String name) { + structName = name; + return writer.tinyInt(name); + } + + @Override + public UInt1Writer uInt1() { + return this; + } + + + @Override + public UInt1Writer uInt1(String name) { + structName = name; + return writer.uInt1(name); + } + + @Override + public UInt2Writer uInt2() { + return this; + } + + + @Override + public UInt2Writer uInt2(String name) { + structName = name; + return writer.uInt2(name); + } + + @Override + public SmallIntWriter smallInt() { + return this; + } + + + @Override + public SmallIntWriter smallInt(String name) { + structName = name; + return writer.smallInt(name); + } + + @Override + public Float2Writer float2() { + return this; + } + + + @Override + public Float2Writer float2(String name) { + structName = name; + return writer.float2(name); + } + + @Override + public IntWriter integer() { + return this; + } + + + @Override + public IntWriter integer(String name) { + structName = name; + return writer.integer(name); + } + + @Override + public UInt4Writer uInt4() { + return this; + } + + + @Override + public UInt4Writer uInt4(String name) { + structName = name; + return writer.uInt4(name); + } + + @Override + public Float4Writer float4() { + return this; + } + + + @Override + public Float4Writer float4(String name) { + structName = name; + return writer.float4(name); + } + + @Override + public DateDayWriter dateDay() { + return this; + } + + + @Override + public DateDayWriter dateDay(String name) { + structName = name; + return writer.dateDay(name); + } + + @Override + public IntervalYearWriter intervalYear() { + return this; + } + + + @Override + public IntervalYearWriter intervalYear(String name) { + structName = name; + return writer.intervalYear(name); + } + + @Override + public TimeSecWriter timeSec() { + return this; + } + + + @Override + public TimeSecWriter 
timeSec(String name) { + structName = name; + return writer.timeSec(name); + } + + @Override + public TimeMilliWriter timeMilli() { + return this; + } + + + @Override + public TimeMilliWriter timeMilli(String name) { + structName = name; + return writer.timeMilli(name); + } + + @Override + public BigIntWriter bigInt() { + return this; + } + + + @Override + public BigIntWriter bigInt(String name) { + structName = name; + return writer.bigInt(name); + } + + @Override + public UInt8Writer uInt8() { + return this; + } + + + @Override + public UInt8Writer uInt8(String name) { + structName = name; + return writer.uInt8(name); + } + + @Override + public Float8Writer float8() { + return this; + } + + + @Override + public Float8Writer float8(String name) { + structName = name; + return writer.float8(name); + } + + @Override + public DateMilliWriter dateMilli() { + return this; + } + + + @Override + public DateMilliWriter dateMilli(String name) { + structName = name; + return writer.dateMilli(name); + } + + @Override + public DurationWriter duration() { + return this; + } + + @Override + public DurationWriter duration(String name, org.apache.arrow.vector.types.TimeUnit unit) { + return writer.duration(name, unit); + } + + @Override + public DurationWriter duration(String name) { + structName = name; + return writer.duration(name); + } + + @Override + public TimeStampSecWriter timeStampSec() { + return this; + } + + + @Override + public TimeStampSecWriter timeStampSec(String name) { + structName = name; + return writer.timeStampSec(name); + } + + @Override + public TimeStampMilliWriter timeStampMilli() { + return this; + } + + + @Override + public TimeStampMilliWriter timeStampMilli(String name) { + structName = name; + return writer.timeStampMilli(name); + } + + @Override + public TimeStampMicroWriter timeStampMicro() { + return this; + } + + + @Override + public TimeStampMicroWriter timeStampMicro(String name) { + structName = name; + return writer.timeStampMicro(name); + } 
+ + @Override + public TimeStampNanoWriter timeStampNano() { + return this; + } + + + @Override + public TimeStampNanoWriter timeStampNano(String name) { + structName = name; + return writer.timeStampNano(name); + } + + @Override + public TimeStampSecTZWriter timeStampSecTZ() { + return this; + } + + @Override + public TimeStampSecTZWriter timeStampSecTZ(String name, String timezone) { + return writer.timeStampSecTZ(name, timezone); + } + + @Override + public TimeStampSecTZWriter timeStampSecTZ(String name) { + structName = name; + return writer.timeStampSecTZ(name); + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ() { + return this; + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name, String timezone) { + return writer.timeStampMilliTZ(name, timezone); + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name) { + structName = name; + return writer.timeStampMilliTZ(name); + } + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ() { + return this; + } + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ(String name, String timezone) { + return writer.timeStampMicroTZ(name, timezone); + } + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ(String name) { + structName = name; + return writer.timeStampMicroTZ(name); + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ() { + return this; + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name, String timezone) { + return writer.timeStampNanoTZ(name, timezone); + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name) { + structName = name; + return writer.timeStampNanoTZ(name); + } + + @Override + public TimeMicroWriter timeMicro() { + return this; + } + + + @Override + public TimeMicroWriter timeMicro(String name) { + structName = name; + return writer.timeMicro(name); + } + + @Override + public TimeNanoWriter timeNano() { + return this; + } + + + @Override + public 
TimeNanoWriter timeNano(String name) { + structName = name; + return writer.timeNano(name); + } + + @Override + public IntervalDayWriter intervalDay() { + return this; + } + + + @Override + public IntervalDayWriter intervalDay(String name) { + structName = name; + return writer.intervalDay(name); + } + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano() { + return this; + } + + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano(String name) { + structName = name; + return writer.intervalMonthDayNano(name); + } + + @Override + public Decimal256Writer decimal256() { + return this; + } + + @Override + public Decimal256Writer decimal256(String name, int scale, int precision) { + return writer.decimal256(name, scale, precision); + } + + @Override + public Decimal256Writer decimal256(String name) { + structName = name; + return writer.decimal256(name); + } + + @Override + public DecimalWriter decimal() { + return this; + } + + @Override + public DecimalWriter decimal(String name, int scale, int precision) { + return writer.decimal(name, scale, precision); + } + + @Override + public DecimalWriter decimal(String name) { + structName = name; + return writer.decimal(name); + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary() { + return this; + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name, int byteWidth) { + return writer.fixedSizeBinary(name, byteWidth); + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name) { + structName = name; + return writer.fixedSizeBinary(name); + } + + @Override + public VarBinaryWriter varBinary() { + return this; + } + + + @Override + public VarBinaryWriter varBinary(String name) { + structName = name; + return writer.varBinary(name); + } + + @Override + public VarCharWriter varChar() { + return this; + } + + + @Override + public VarCharWriter varChar(String name) { + structName = name; + return writer.varChar(name); + } + + @Override + public 
LargeVarCharWriter largeVarChar() { + return this; + } + + + @Override + public LargeVarCharWriter largeVarChar(String name) { + structName = name; + return writer.largeVarChar(name); + } + + @Override + public LargeVarBinaryWriter largeVarBinary() { + return this; + } + + + @Override + public LargeVarBinaryWriter largeVarBinary(String name) { + structName = name; + return writer.largeVarBinary(name); + } + + @Override + public BitWriter bit() { + return this; + } + + + @Override + public BitWriter bit(String name) { + structName = name; + return writer.bit(name); + } + + + @Override + public StructWriter struct() { + inStruct = true; + return this; + } + + @Override + public ListWriter list() { + return writer; + } + + @Override + public ListWriter list(String name) { + ListWriter listWriter = writer.list(name); + return listWriter; + } + + @Override + public StructWriter struct(String name) { + StructWriter structWriter = writer.struct(name); + return structWriter; + } + + @Override + public MapWriter map() { + return writer; + } + + @Override + public MapWriter map(String name) { + MapWriter mapWriter = writer.map(name); + return mapWriter; + } + + @Override + public MapWriter map(boolean keysSorted) { + writer.map(keysSorted); + return writer; + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + MapWriter mapWriter = writer.map(name, keysSorted); + return mapWriter; + } + + @Override + public void startList() { + vector.startNewValue(idx()); + writer.setPosition(checkedCastToInt(vector.getOffsetBuffer().getLong((idx() + 1L) * OFFSET_WIDTH))); + listStarted = true; + } + + @Override + public void endList() { + vector.getOffsetBuffer().setLong((idx() + 1L) * OFFSET_WIDTH, writer.idx()); + setPosition(idx() + 1); + listStarted = false; + } + + @Override + public void start() { + writer.start(); + } + + @Override + public void end() { + writer.end(); + inStruct = false; + } + + @Override + public void writeNull() { + if (!listStarted){ + 
vector.setNull(idx()); + } else { + writer.writeNull(); + } + } + + @Override + public void writeTinyInt(byte value) { + writer.writeTinyInt(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TinyIntHolder holder) { + writer.writeTinyInt(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeUInt1(byte value) { + writer.writeUInt1(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(UInt1Holder holder) { + writer.writeUInt1(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeUInt2(char value) { + writer.writeUInt2(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(UInt2Holder holder) { + writer.writeUInt2(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeSmallInt(short value) { + writer.writeSmallInt(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(SmallIntHolder holder) { + writer.writeSmallInt(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeFloat2(short value) { + writer.writeFloat2(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(Float2Holder holder) { + writer.writeFloat2(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeInt(int value) { + writer.writeInt(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(IntHolder holder) { + writer.writeInt(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeUInt4(int value) { + writer.writeUInt4(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(UInt4Holder holder) { + writer.writeUInt4(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeFloat4(float value) { + writer.writeFloat4(value); + 
writer.setPosition(writer.idx()+1); + } + + @Override + public void write(Float4Holder holder) { + writer.writeFloat4(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeDateDay(int value) { + writer.writeDateDay(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(DateDayHolder holder) { + writer.writeDateDay(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeIntervalYear(int value) { + writer.writeIntervalYear(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(IntervalYearHolder holder) { + writer.writeIntervalYear(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeSec(int value) { + writer.writeTimeSec(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeSecHolder holder) { + writer.writeTimeSec(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeMilli(int value) { + writer.writeTimeMilli(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeMilliHolder holder) { + writer.writeTimeMilli(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeBigInt(long value) { + writer.writeBigInt(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(BigIntHolder holder) { + writer.writeBigInt(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeUInt8(long value) { + writer.writeUInt8(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(UInt8Holder holder) { + writer.writeUInt8(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeFloat8(double value) { + writer.writeFloat8(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(Float8Holder holder) { + 
writer.writeFloat8(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeDateMilli(long value) { + writer.writeDateMilli(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(DateMilliHolder holder) { + writer.writeDateMilli(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeDuration(long value) { + writer.writeDuration(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(DurationHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + + + @Override + public void writeTimeStampSec(long value) { + writer.writeTimeStampSec(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampSecHolder holder) { + writer.writeTimeStampSec(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeStampMilli(long value) { + writer.writeTimeStampMilli(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampMilliHolder holder) { + writer.writeTimeStampMilli(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeStampMicro(long value) { + writer.writeTimeStampMicro(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampMicroHolder holder) { + writer.writeTimeStampMicro(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeStampNano(long value) { + writer.writeTimeStampNano(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampNanoHolder holder) { + writer.writeTimeStampNano(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeStampSecTZ(long value) { + writer.writeTimeStampSecTZ(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampSecTZHolder holder) { + 
writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + + + @Override + public void writeTimeStampMilliTZ(long value) { + writer.writeTimeStampMilliTZ(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampMilliTZHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + + + @Override + public void writeTimeStampMicroTZ(long value) { + writer.writeTimeStampMicroTZ(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampMicroTZHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + + + @Override + public void writeTimeStampNanoTZ(long value) { + writer.writeTimeStampNanoTZ(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampNanoTZHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + + + @Override + public void writeTimeMicro(long value) { + writer.writeTimeMicro(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeMicroHolder holder) { + writer.writeTimeMicro(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeNano(long value) { + writer.writeTimeNano(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeNanoHolder holder) { + writer.writeTimeNano(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeIntervalDay(int days, int milliseconds) { + writer.writeIntervalDay(days, milliseconds); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(IntervalDayHolder holder) { + writer.writeIntervalDay(holder.days, holder.milliseconds); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeIntervalMonthDayNano(int months, int days, long nanoseconds) { + writer.writeIntervalMonthDayNano(months, days, nanoseconds); + writer.setPosition(writer.idx()+1); + } + + 
@Override + public void write(IntervalMonthDayNanoHolder holder) { + writer.writeIntervalMonthDayNano(holder.months, holder.days, holder.nanoseconds); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeDecimal256(long start, ArrowBuf buffer) { + writer.writeDecimal256(start, buffer); + writer.setPosition(writer.idx()+1); + } + + public void writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType) { + writer.writeDecimal256(start, buffer, arrowType); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(Decimal256Holder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + public void writeDecimal256(BigDecimal value) { + writer.writeDecimal256(value); + writer.setPosition(writer.idx()+1); + } + + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType){ + writer.writeBigEndianBytesToDecimal256(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeDecimal(long start, ArrowBuf buffer) { + writer.writeDecimal(start, buffer); + writer.setPosition(writer.idx()+1); + } + + public void writeDecimal(long start, ArrowBuf buffer, ArrowType arrowType) { + writer.writeDecimal(start, buffer, arrowType); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(DecimalHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + public void writeDecimal(BigDecimal value) { + writer.writeDecimal(value); + writer.setPosition(writer.idx()+1); + } + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType){ + writer.writeBigEndianBytesToDecimal(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeFixedSizeBinary(ArrowBuf buffer) { + writer.writeFixedSizeBinary(buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(FixedSizeBinaryHolder holder) { + writer.write(holder); + 
writer.setPosition(writer.idx()+1); + } + + + + @Override + public void writeVarBinary(int start, int end, ArrowBuf buffer) { + writer.writeVarBinary(start, end, buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(VarBinaryHolder holder) { + writer.writeVarBinary(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void writeVarBinary(byte[] value) { + writer.writeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarBinary(byte[] value, int offset, int length) { + writer.writeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarBinary(ByteBuffer value) { + writer.writeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarBinary(ByteBuffer value, int offset, int length) { + writer.writeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarChar(int start, int end, ArrowBuf buffer) { + writer.writeVarChar(start, end, buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(VarCharHolder holder) { + writer.writeVarChar(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void writeVarChar(Text value) { + writer.writeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + public void writeVarChar(String value) { + writer.writeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarChar(long start, long end, ArrowBuf buffer) { + writer.writeLargeVarChar(start, end, buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(LargeVarCharHolder holder) { + writer.writeLargeVarChar(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void writeLargeVarChar(Text value) { + 
writer.writeLargeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + public void writeLargeVarChar(String value) { + writer.writeLargeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(long start, long end, ArrowBuf buffer) { + writer.writeLargeVarBinary(start, end, buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(LargeVarBinaryHolder holder) { + writer.writeLargeVarBinary(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void writeLargeVarBinary(byte[] value) { + writer.writeLargeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(byte[] value, int offset, int length) { + writer.writeLargeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value) { + writer.writeLargeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value, int offset, int length) { + writer.writeLargeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeBit(int value) { + writer.writeBit(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(BitHolder holder) { + writer.writeBit(holder.value); + writer.setPosition(writer.idx()+1); + } + + +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionListWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionListWriter.java new file mode 100644 index 000000000000..ca72b795136a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionListWriter.java @@ -0,0 +1,1268 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + +import static org.apache.arrow.memory.util.LargeMemoryUtil.checkedCastToInt; + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import 
java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using freemarker and the UnionListWriter.java template. + */ + +@SuppressWarnings("unused") +public class UnionListWriter extends AbstractFieldWriter { + + protected ListVector vector; + protected PromotableWriter writer; + private boolean inStruct = false; + private boolean listStarted = false; + private String structName; + private static final int OFFSET_WIDTH = 4; + + public UnionListWriter(ListVector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public UnionListWriter(ListVector vector, NullableStructWriterFactory nullableStructWriterFactory) { + this.vector = vector; + this.writer = new PromotableWriter(vector.getDataVector(), vector, nullableStructWriterFactory); + } + + public UnionListWriter(ListVector vector, AbstractFieldWriter parent) { + this(vector); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + public Field getField() { + return vector.getField(); + } + + public void setValueCount(int count) { + vector.setValueCount(count); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void close() throws Exception { + vector.close(); + writer.close(); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + } + + @Override + public TinyIntWriter tinyInt() { + return this; + } + + + @Override + public TinyIntWriter tinyInt(String name) { + structName = name; + return writer.tinyInt(name); + } + + @Override + public UInt1Writer uInt1() { + return this; + } + + + @Override + public 
UInt1Writer uInt1(String name) { + structName = name; + return writer.uInt1(name); + } + + @Override + public UInt2Writer uInt2() { + return this; + } + + + @Override + public UInt2Writer uInt2(String name) { + structName = name; + return writer.uInt2(name); + } + + @Override + public SmallIntWriter smallInt() { + return this; + } + + + @Override + public SmallIntWriter smallInt(String name) { + structName = name; + return writer.smallInt(name); + } + + @Override + public Float2Writer float2() { + return this; + } + + + @Override + public Float2Writer float2(String name) { + structName = name; + return writer.float2(name); + } + + @Override + public IntWriter integer() { + return this; + } + + + @Override + public IntWriter integer(String name) { + structName = name; + return writer.integer(name); + } + + @Override + public UInt4Writer uInt4() { + return this; + } + + + @Override + public UInt4Writer uInt4(String name) { + structName = name; + return writer.uInt4(name); + } + + @Override + public Float4Writer float4() { + return this; + } + + + @Override + public Float4Writer float4(String name) { + structName = name; + return writer.float4(name); + } + + @Override + public DateDayWriter dateDay() { + return this; + } + + + @Override + public DateDayWriter dateDay(String name) { + structName = name; + return writer.dateDay(name); + } + + @Override + public IntervalYearWriter intervalYear() { + return this; + } + + + @Override + public IntervalYearWriter intervalYear(String name) { + structName = name; + return writer.intervalYear(name); + } + + @Override + public TimeSecWriter timeSec() { + return this; + } + + + @Override + public TimeSecWriter timeSec(String name) { + structName = name; + return writer.timeSec(name); + } + + @Override + public TimeMilliWriter timeMilli() { + return this; + } + + + @Override + public TimeMilliWriter timeMilli(String name) { + structName = name; + return writer.timeMilli(name); + } + + @Override + public BigIntWriter bigInt() { + 
return this; + } + + + @Override + public BigIntWriter bigInt(String name) { + structName = name; + return writer.bigInt(name); + } + + @Override + public UInt8Writer uInt8() { + return this; + } + + + @Override + public UInt8Writer uInt8(String name) { + structName = name; + return writer.uInt8(name); + } + + @Override + public Float8Writer float8() { + return this; + } + + + @Override + public Float8Writer float8(String name) { + structName = name; + return writer.float8(name); + } + + @Override + public DateMilliWriter dateMilli() { + return this; + } + + + @Override + public DateMilliWriter dateMilli(String name) { + structName = name; + return writer.dateMilli(name); + } + + @Override + public DurationWriter duration() { + return this; + } + + @Override + public DurationWriter duration(String name, org.apache.arrow.vector.types.TimeUnit unit) { + return writer.duration(name, unit); + } + + @Override + public DurationWriter duration(String name) { + structName = name; + return writer.duration(name); + } + + @Override + public TimeStampSecWriter timeStampSec() { + return this; + } + + + @Override + public TimeStampSecWriter timeStampSec(String name) { + structName = name; + return writer.timeStampSec(name); + } + + @Override + public TimeStampMilliWriter timeStampMilli() { + return this; + } + + + @Override + public TimeStampMilliWriter timeStampMilli(String name) { + structName = name; + return writer.timeStampMilli(name); + } + + @Override + public TimeStampMicroWriter timeStampMicro() { + return this; + } + + + @Override + public TimeStampMicroWriter timeStampMicro(String name) { + structName = name; + return writer.timeStampMicro(name); + } + + @Override + public TimeStampNanoWriter timeStampNano() { + return this; + } + + + @Override + public TimeStampNanoWriter timeStampNano(String name) { + structName = name; + return writer.timeStampNano(name); + } + + @Override + public TimeStampSecTZWriter timeStampSecTZ() { + return this; + } + + @Override + public 
TimeStampSecTZWriter timeStampSecTZ(String name, String timezone) { + return writer.timeStampSecTZ(name, timezone); + } + + @Override + public TimeStampSecTZWriter timeStampSecTZ(String name) { + structName = name; + return writer.timeStampSecTZ(name); + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ() { + return this; + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name, String timezone) { + return writer.timeStampMilliTZ(name, timezone); + } + + @Override + public TimeStampMilliTZWriter timeStampMilliTZ(String name) { + structName = name; + return writer.timeStampMilliTZ(name); + } + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ() { + return this; + } + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ(String name, String timezone) { + return writer.timeStampMicroTZ(name, timezone); + } + + @Override + public TimeStampMicroTZWriter timeStampMicroTZ(String name) { + structName = name; + return writer.timeStampMicroTZ(name); + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ() { + return this; + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name, String timezone) { + return writer.timeStampNanoTZ(name, timezone); + } + + @Override + public TimeStampNanoTZWriter timeStampNanoTZ(String name) { + structName = name; + return writer.timeStampNanoTZ(name); + } + + @Override + public TimeMicroWriter timeMicro() { + return this; + } + + + @Override + public TimeMicroWriter timeMicro(String name) { + structName = name; + return writer.timeMicro(name); + } + + @Override + public TimeNanoWriter timeNano() { + return this; + } + + + @Override + public TimeNanoWriter timeNano(String name) { + structName = name; + return writer.timeNano(name); + } + + @Override + public IntervalDayWriter intervalDay() { + return this; + } + + + @Override + public IntervalDayWriter intervalDay(String name) { + structName = name; + return writer.intervalDay(name); + } + + @Override + public 
IntervalMonthDayNanoWriter intervalMonthDayNano() { + return this; + } + + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano(String name) { + structName = name; + return writer.intervalMonthDayNano(name); + } + + @Override + public Decimal256Writer decimal256() { + return this; + } + + @Override + public Decimal256Writer decimal256(String name, int scale, int precision) { + return writer.decimal256(name, scale, precision); + } + + @Override + public Decimal256Writer decimal256(String name) { + structName = name; + return writer.decimal256(name); + } + + @Override + public DecimalWriter decimal() { + return this; + } + + @Override + public DecimalWriter decimal(String name, int scale, int precision) { + return writer.decimal(name, scale, precision); + } + + @Override + public DecimalWriter decimal(String name) { + structName = name; + return writer.decimal(name); + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary() { + return this; + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name, int byteWidth) { + return writer.fixedSizeBinary(name, byteWidth); + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name) { + structName = name; + return writer.fixedSizeBinary(name); + } + + @Override + public VarBinaryWriter varBinary() { + return this; + } + + + @Override + public VarBinaryWriter varBinary(String name) { + structName = name; + return writer.varBinary(name); + } + + @Override + public VarCharWriter varChar() { + return this; + } + + + @Override + public VarCharWriter varChar(String name) { + structName = name; + return writer.varChar(name); + } + + @Override + public LargeVarCharWriter largeVarChar() { + return this; + } + + + @Override + public LargeVarCharWriter largeVarChar(String name) { + structName = name; + return writer.largeVarChar(name); + } + + @Override + public LargeVarBinaryWriter largeVarBinary() { + return this; + } + + + @Override + public LargeVarBinaryWriter 
largeVarBinary(String name) { + structName = name; + return writer.largeVarBinary(name); + } + + @Override + public BitWriter bit() { + return this; + } + + + @Override + public BitWriter bit(String name) { + structName = name; + return writer.bit(name); + } + + + @Override + public StructWriter struct() { + inStruct = true; + return this; + } + + @Override + public ListWriter list() { + return writer; + } + + @Override + public ListWriter list(String name) { + ListWriter listWriter = writer.list(name); + return listWriter; + } + + @Override + public StructWriter struct(String name) { + StructWriter structWriter = writer.struct(name); + return structWriter; + } + + @Override + public MapWriter map() { + return writer; + } + + @Override + public MapWriter map(String name) { + MapWriter mapWriter = writer.map(name); + return mapWriter; + } + + @Override + public MapWriter map(boolean keysSorted) { + writer.map(keysSorted); + return writer; + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + MapWriter mapWriter = writer.map(name, keysSorted); + return mapWriter; + } + + @Override + public void startList() { + vector.startNewValue(idx()); + writer.setPosition(vector.getOffsetBuffer().getInt((idx() + 1L) * OFFSET_WIDTH)); + listStarted = true; + } + + @Override + public void endList() { + vector.getOffsetBuffer().setInt((idx() + 1L) * OFFSET_WIDTH, writer.idx()); + setPosition(idx() + 1); + listStarted = false; + } + + @Override + public void start() { + writer.start(); + } + + @Override + public void end() { + writer.end(); + inStruct = false; + } + + @Override + public void writeNull() { + if (!listStarted){ + vector.setNull(idx()); + } else { + writer.writeNull(); + } + } + + @Override + public void writeTinyInt(byte value) { + writer.writeTinyInt(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TinyIntHolder holder) { + writer.writeTinyInt(holder.value); + writer.setPosition(writer.idx()+1); + } + + + 
@Override + public void writeUInt1(byte value) { + writer.writeUInt1(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(UInt1Holder holder) { + writer.writeUInt1(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeUInt2(char value) { + writer.writeUInt2(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(UInt2Holder holder) { + writer.writeUInt2(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeSmallInt(short value) { + writer.writeSmallInt(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(SmallIntHolder holder) { + writer.writeSmallInt(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeFloat2(short value) { + writer.writeFloat2(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(Float2Holder holder) { + writer.writeFloat2(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeInt(int value) { + writer.writeInt(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(IntHolder holder) { + writer.writeInt(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeUInt4(int value) { + writer.writeUInt4(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(UInt4Holder holder) { + writer.writeUInt4(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeFloat4(float value) { + writer.writeFloat4(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(Float4Holder holder) { + writer.writeFloat4(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeDateDay(int value) { + writer.writeDateDay(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(DateDayHolder holder) { 
+ writer.writeDateDay(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeIntervalYear(int value) { + writer.writeIntervalYear(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(IntervalYearHolder holder) { + writer.writeIntervalYear(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeSec(int value) { + writer.writeTimeSec(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeSecHolder holder) { + writer.writeTimeSec(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeMilli(int value) { + writer.writeTimeMilli(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeMilliHolder holder) { + writer.writeTimeMilli(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeBigInt(long value) { + writer.writeBigInt(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(BigIntHolder holder) { + writer.writeBigInt(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeUInt8(long value) { + writer.writeUInt8(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(UInt8Holder holder) { + writer.writeUInt8(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeFloat8(double value) { + writer.writeFloat8(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(Float8Holder holder) { + writer.writeFloat8(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeDateMilli(long value) { + writer.writeDateMilli(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(DateMilliHolder holder) { + writer.writeDateMilli(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void 
writeDuration(long value) { + writer.writeDuration(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(DurationHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + + + @Override + public void writeTimeStampSec(long value) { + writer.writeTimeStampSec(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampSecHolder holder) { + writer.writeTimeStampSec(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeStampMilli(long value) { + writer.writeTimeStampMilli(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampMilliHolder holder) { + writer.writeTimeStampMilli(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeStampMicro(long value) { + writer.writeTimeStampMicro(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampMicroHolder holder) { + writer.writeTimeStampMicro(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeStampNano(long value) { + writer.writeTimeStampNano(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampNanoHolder holder) { + writer.writeTimeStampNano(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeStampSecTZ(long value) { + writer.writeTimeStampSecTZ(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampSecTZHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + + + @Override + public void writeTimeStampMilliTZ(long value) { + writer.writeTimeStampMilliTZ(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampMilliTZHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + + + @Override + public void 
writeTimeStampMicroTZ(long value) { + writer.writeTimeStampMicroTZ(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampMicroTZHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + + + @Override + public void writeTimeStampNanoTZ(long value) { + writer.writeTimeStampNanoTZ(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeStampNanoTZHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + + + @Override + public void writeTimeMicro(long value) { + writer.writeTimeMicro(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeMicroHolder holder) { + writer.writeTimeMicro(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeTimeNano(long value) { + writer.writeTimeNano(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(TimeNanoHolder holder) { + writer.writeTimeNano(holder.value); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeIntervalDay(int days, int milliseconds) { + writer.writeIntervalDay(days, milliseconds); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(IntervalDayHolder holder) { + writer.writeIntervalDay(holder.days, holder.milliseconds); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeIntervalMonthDayNano(int months, int days, long nanoseconds) { + writer.writeIntervalMonthDayNano(months, days, nanoseconds); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(IntervalMonthDayNanoHolder holder) { + writer.writeIntervalMonthDayNano(holder.months, holder.days, holder.nanoseconds); + writer.setPosition(writer.idx()+1); + } + + + @Override + public void writeDecimal256(long start, ArrowBuf buffer) { + writer.writeDecimal256(start, buffer); + writer.setPosition(writer.idx()+1); + } + + public void 
writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType) { + writer.writeDecimal256(start, buffer, arrowType); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(Decimal256Holder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + public void writeDecimal256(BigDecimal value) { + writer.writeDecimal256(value); + writer.setPosition(writer.idx()+1); + } + + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType){ + writer.writeBigEndianBytesToDecimal256(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeDecimal(long start, ArrowBuf buffer) { + writer.writeDecimal(start, buffer); + writer.setPosition(writer.idx()+1); + } + + public void writeDecimal(long start, ArrowBuf buffer, ArrowType arrowType) { + writer.writeDecimal(start, buffer, arrowType); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(DecimalHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + public void writeDecimal(BigDecimal value) { + writer.writeDecimal(value); + writer.setPosition(writer.idx()+1); + } + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType){ + writer.writeBigEndianBytesToDecimal(value, arrowType); + writer.setPosition(writer.idx() + 1); + } + + + @Override + public void writeFixedSizeBinary(ArrowBuf buffer) { + writer.writeFixedSizeBinary(buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(FixedSizeBinaryHolder holder) { + writer.write(holder); + writer.setPosition(writer.idx()+1); + } + + + + @Override + public void writeVarBinary(int start, int end, ArrowBuf buffer) { + writer.writeVarBinary(start, end, buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(VarBinaryHolder holder) { + writer.writeVarBinary(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + 
public void writeVarBinary(byte[] value) { + writer.writeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarBinary(byte[] value, int offset, int length) { + writer.writeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarBinary(ByteBuffer value) { + writer.writeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarBinary(ByteBuffer value, int offset, int length) { + writer.writeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeVarChar(int start, int end, ArrowBuf buffer) { + writer.writeVarChar(start, end, buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(VarCharHolder holder) { + writer.writeVarChar(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void writeVarChar(Text value) { + writer.writeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + public void writeVarChar(String value) { + writer.writeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarChar(long start, long end, ArrowBuf buffer) { + writer.writeLargeVarChar(start, end, buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(LargeVarCharHolder holder) { + writer.writeLargeVarChar(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void writeLargeVarChar(Text value) { + writer.writeLargeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + public void writeLargeVarChar(String value) { + writer.writeLargeVarChar(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(long start, long end, ArrowBuf buffer) { + writer.writeLargeVarBinary(start, end, buffer); + writer.setPosition(writer.idx()+1); + } + + 
@Override + public void write(LargeVarBinaryHolder holder) { + writer.writeLargeVarBinary(holder.start, holder.end, holder.buffer); + writer.setPosition(writer.idx()+1); + } + + @Override + public void writeLargeVarBinary(byte[] value) { + writer.writeLargeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(byte[] value, int offset, int length) { + writer.writeLargeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value) { + writer.writeLargeVarBinary(value); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value, int offset, int length) { + writer.writeLargeVarBinary(value, offset, length); + writer.setPosition(writer.idx() + 1); + } + + @Override + public void writeBit(int value) { + writer.writeBit(value); + writer.setPosition(writer.idx()+1); + } + + @Override + public void write(BitHolder holder) { + writer.writeBit(holder.value); + writer.setPosition(writer.idx()+1); + } + + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionMapWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionMapWriter.java new file mode 100644 index 000000000000..435f3e0e8a40 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionMapWriter.java @@ -0,0 +1,578 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/* + * This class is generated using freemarker and the 
UnionMapWriter.java template. + */ + +/** + *

Writer for MapVectors. This extends UnionListWriter to simplify writing map entries to a list + * of struct elements, with "key" and "value" fields. The procedure for writing a map begin with + * {@link #startMap()} followed by {@link #startEntry()}. An entry is written by using the + * {@link #key()} writer to write the key, then the {@link #value()} writer to write a value. After + * writing the value, call {@link #endEntry()} to complete the entry. Each map can have 1 or more + * entries. When done writing entries, call {@link #endMap()} to complete the map. + * + *

NOTE: the MapVector can have NULL values by not writing to position. If a map is started with + * {@link #startMap()}, then it must have a key written. The value of a map entry can be NULL by + * not using the {@link #value()} writer. + * + *

Example to write the following map to position 5 of a vector + *

{@code
+ *   // {
+ *   //   1 -> 3,
+ *   //   2 -> 4,
+ *   //   3 -> NULL
+ *   // }
+ *
+ *   UnionMapWriter writer = ...
+ *
+ *   writer.setPosition(5);
+ *   writer.startMap();
+ *   writer.startEntry();
+ *   writer.key().integer().writeInt(1);
+ *   writer.value().integer().writeInt(3);
+ *   writer.endEntry();
+ *   writer.startEntry();
+ *   writer.key().integer().writeInt(2);
+ *   writer.value().integer().writeInt(4);
+ *   writer.endEntry();
+ *   writer.startEntry();
+ *   writer.key().integer().writeInt(3);
+ *   writer.endEntry();
+ *   writer.endMap();
+ * 
+ *

+ */ +@SuppressWarnings("unused") +public class UnionMapWriter extends UnionListWriter { + + /** + * Current mode for writing map entries, set by calling {@link #key()} or {@link #value()} + * and reset with a call to {@link #endEntry()}. With KEY mode, a struct writer with field + * named "key" is returned. With VALUE mode, a struct writer with field named "value" is + * returned. In OFF mode, the writer will behave like a standard UnionListWriter + */ + private enum MapWriteMode { + OFF, + KEY, + VALUE, + } + + private MapWriteMode mode = MapWriteMode.OFF; + private StructWriter entryWriter; + + public UnionMapWriter(MapVector vector) { + super(vector); + entryWriter = struct(); + } + + /** Start writing a map that consists of 1 or more entries. */ + public void startMap() { + startList(); + } + + /** Complete the map. */ + public void endMap() { + endList(); + } + + /** + * Start a map entry that should be followed by calls to {@link #key()} and {@link #value()} + * writers. Call {@link #endEntry()} to complete the entry. + */ + public void startEntry() { + writer.setAddVectorAsNullable(false); + entryWriter.start(); + } + + /** Complete the map entry. */ + public void endEntry() { + entryWriter.end(); + mode = MapWriteMode.OFF; + writer.setAddVectorAsNullable(true); + } + + /** Return the key writer that is used to write to the "key" field. */ + public UnionMapWriter key() { + writer.setAddVectorAsNullable(false); + mode = MapWriteMode.KEY; + return this; + } + + /** Return the value writer that is used to write to the "value" field. 
*/ + public UnionMapWriter value() { + writer.setAddVectorAsNullable(true); + mode = MapWriteMode.VALUE; + return this; + } + + @Override + public TinyIntWriter tinyInt() { + switch (mode) { + case KEY: + return entryWriter.tinyInt(MapVector.KEY_NAME); + case VALUE: + return entryWriter.tinyInt(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public UInt1Writer uInt1() { + switch (mode) { + case KEY: + return entryWriter.uInt1(MapVector.KEY_NAME); + case VALUE: + return entryWriter.uInt1(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public UInt2Writer uInt2() { + switch (mode) { + case KEY: + return entryWriter.uInt2(MapVector.KEY_NAME); + case VALUE: + return entryWriter.uInt2(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public SmallIntWriter smallInt() { + switch (mode) { + case KEY: + return entryWriter.smallInt(MapVector.KEY_NAME); + case VALUE: + return entryWriter.smallInt(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public Float2Writer float2() { + switch (mode) { + case KEY: + return entryWriter.float2(MapVector.KEY_NAME); + case VALUE: + return entryWriter.float2(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public IntWriter integer() { + switch (mode) { + case KEY: + return entryWriter.integer(MapVector.KEY_NAME); + case VALUE: + return entryWriter.integer(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public UInt4Writer uInt4() { + switch (mode) { + case KEY: + return entryWriter.uInt4(MapVector.KEY_NAME); + case VALUE: + return entryWriter.uInt4(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public Float4Writer float4() { + switch (mode) { + case KEY: + return entryWriter.float4(MapVector.KEY_NAME); + case VALUE: + return entryWriter.float4(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public DateDayWriter dateDay() { + switch (mode) { + case 
KEY: + return entryWriter.dateDay(MapVector.KEY_NAME); + case VALUE: + return entryWriter.dateDay(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public IntervalYearWriter intervalYear() { + switch (mode) { + case KEY: + return entryWriter.intervalYear(MapVector.KEY_NAME); + case VALUE: + return entryWriter.intervalYear(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public TimeSecWriter timeSec() { + switch (mode) { + case KEY: + return entryWriter.timeSec(MapVector.KEY_NAME); + case VALUE: + return entryWriter.timeSec(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public TimeMilliWriter timeMilli() { + switch (mode) { + case KEY: + return entryWriter.timeMilli(MapVector.KEY_NAME); + case VALUE: + return entryWriter.timeMilli(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public BigIntWriter bigInt() { + switch (mode) { + case KEY: + return entryWriter.bigInt(MapVector.KEY_NAME); + case VALUE: + return entryWriter.bigInt(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public UInt8Writer uInt8() { + switch (mode) { + case KEY: + return entryWriter.uInt8(MapVector.KEY_NAME); + case VALUE: + return entryWriter.uInt8(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public Float8Writer float8() { + switch (mode) { + case KEY: + return entryWriter.float8(MapVector.KEY_NAME); + case VALUE: + return entryWriter.float8(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public DateMilliWriter dateMilli() { + switch (mode) { + case KEY: + return entryWriter.dateMilli(MapVector.KEY_NAME); + case VALUE: + return entryWriter.dateMilli(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public TimeStampSecWriter timeStampSec() { + switch (mode) { + case KEY: + return entryWriter.timeStampSec(MapVector.KEY_NAME); + case VALUE: + return entryWriter.timeStampSec(MapVector.VALUE_NAME); + 
default: + return this; + } + } + + @Override + public TimeStampMilliWriter timeStampMilli() { + switch (mode) { + case KEY: + return entryWriter.timeStampMilli(MapVector.KEY_NAME); + case VALUE: + return entryWriter.timeStampMilli(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public TimeStampMicroWriter timeStampMicro() { + switch (mode) { + case KEY: + return entryWriter.timeStampMicro(MapVector.KEY_NAME); + case VALUE: + return entryWriter.timeStampMicro(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public TimeStampNanoWriter timeStampNano() { + switch (mode) { + case KEY: + return entryWriter.timeStampNano(MapVector.KEY_NAME); + case VALUE: + return entryWriter.timeStampNano(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public TimeMicroWriter timeMicro() { + switch (mode) { + case KEY: + return entryWriter.timeMicro(MapVector.KEY_NAME); + case VALUE: + return entryWriter.timeMicro(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public TimeNanoWriter timeNano() { + switch (mode) { + case KEY: + return entryWriter.timeNano(MapVector.KEY_NAME); + case VALUE: + return entryWriter.timeNano(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public IntervalDayWriter intervalDay() { + switch (mode) { + case KEY: + return entryWriter.intervalDay(MapVector.KEY_NAME); + case VALUE: + return entryWriter.intervalDay(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano() { + switch (mode) { + case KEY: + return entryWriter.intervalMonthDayNano(MapVector.KEY_NAME); + case VALUE: + return entryWriter.intervalMonthDayNano(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public VarBinaryWriter varBinary() { + switch (mode) { + case KEY: + return entryWriter.varBinary(MapVector.KEY_NAME); + case VALUE: + return entryWriter.varBinary(MapVector.VALUE_NAME); + 
default: + return this; + } + } + + @Override + public VarCharWriter varChar() { + switch (mode) { + case KEY: + return entryWriter.varChar(MapVector.KEY_NAME); + case VALUE: + return entryWriter.varChar(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public LargeVarCharWriter largeVarChar() { + switch (mode) { + case KEY: + return entryWriter.largeVarChar(MapVector.KEY_NAME); + case VALUE: + return entryWriter.largeVarChar(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public LargeVarBinaryWriter largeVarBinary() { + switch (mode) { + case KEY: + return entryWriter.largeVarBinary(MapVector.KEY_NAME); + case VALUE: + return entryWriter.largeVarBinary(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public BitWriter bit() { + switch (mode) { + case KEY: + return entryWriter.bit(MapVector.KEY_NAME); + case VALUE: + return entryWriter.bit(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public DecimalWriter decimal() { + switch (mode) { + case KEY: + return entryWriter.decimal(MapVector.KEY_NAME); + case VALUE: + return entryWriter.decimal(MapVector.VALUE_NAME); + default: + return this; + } + } + + @Override + public Decimal256Writer decimal256() { + switch (mode) { + case KEY: + return entryWriter.decimal256(MapVector.KEY_NAME); + case VALUE: + return entryWriter.decimal256(MapVector.VALUE_NAME); + default: + return this; + } + } + + + @Override + public StructWriter struct() { + switch (mode) { + case KEY: + return entryWriter.struct(MapVector.KEY_NAME); + case VALUE: + return entryWriter.struct(MapVector.VALUE_NAME); + default: + return super.struct(); + } + } + + @Override + public ListWriter list() { + switch (mode) { + case KEY: + return entryWriter.list(MapVector.KEY_NAME); + case VALUE: + return entryWriter.list(MapVector.VALUE_NAME); + default: + return super.list(); + } + } + + @Override + public MapWriter map(boolean keysSorted) { + switch (mode) { + case 
KEY: + return entryWriter.map(MapVector.KEY_NAME, keysSorted); + case VALUE: + return entryWriter.map(MapVector.VALUE_NAME, keysSorted); + default: + return super.map(); + } + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionReader.java new file mode 100644 index 000000000000..f814106d5200 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionReader.java @@ -0,0 +1,1078 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/** + * Source code generated using FreeMarker template UnionReader.java + */ +@SuppressWarnings("unused") +public class UnionReader extends AbstractFieldReader { + + private static final int NUM_SUPPORTED_TYPES = 46; + + private BaseReader[] readers = new BaseReader[NUM_SUPPORTED_TYPES]; + public UnionVector data; + + public UnionReader(UnionVector data) { + this.data = data; + } + + public MinorType getMinorType() { + return TYPES[data.getTypeValue(idx())]; + } + + 
private static MinorType[] TYPES = new MinorType[NUM_SUPPORTED_TYPES]; + + static { + for (MinorType minorType : MinorType.values()) { + TYPES[minorType.ordinal()] = minorType; + } + } + + @Override + public Field getField() { + return data.getField(); + } + + public boolean isSet(){ + return !data.isNull(idx()); + } + + public void read(UnionHolder holder) { + holder.reader = this; + holder.isSet = this.isSet() ? 1 : 0; + } + + public void read(int index, UnionHolder holder) { + getList().read(index, holder); + } + + private FieldReader getReaderForIndex(int index) { + int typeValue = data.getTypeValue(index); + FieldReader reader = (FieldReader) readers[typeValue]; + if (reader != null) { + return reader; + } + switch (MinorType.values()[typeValue]) { + case NULL: + return NullReader.INSTANCE; + case STRUCT: + return (FieldReader) getStruct(); + case LIST: + return (FieldReader) getList(); + case MAP: + return (FieldReader) getMap(); + case TINYINT: + return (FieldReader) getTinyInt(); + case UINT1: + return (FieldReader) getUInt1(); + case UINT2: + return (FieldReader) getUInt2(); + case SMALLINT: + return (FieldReader) getSmallInt(); + case FLOAT2: + return (FieldReader) getFloat2(); + case INT: + return (FieldReader) getInt(); + case UINT4: + return (FieldReader) getUInt4(); + case FLOAT4: + return (FieldReader) getFloat4(); + case DATEDAY: + return (FieldReader) getDateDay(); + case INTERVALYEAR: + return (FieldReader) getIntervalYear(); + case TIMESEC: + return (FieldReader) getTimeSec(); + case TIMEMILLI: + return (FieldReader) getTimeMilli(); + case BIGINT: + return (FieldReader) getBigInt(); + case UINT8: + return (FieldReader) getUInt8(); + case FLOAT8: + return (FieldReader) getFloat8(); + case DATEMILLI: + return (FieldReader) getDateMilli(); + case DURATION: + return (FieldReader) getDuration(); + case TIMESTAMPSEC: + return (FieldReader) getTimeStampSec(); + case TIMESTAMPMILLI: + return (FieldReader) getTimeStampMilli(); + case TIMESTAMPMICRO: + 
return (FieldReader) getTimeStampMicro(); + case TIMESTAMPNANO: + return (FieldReader) getTimeStampNano(); + case TIMESTAMPSECTZ: + return (FieldReader) getTimeStampSecTZ(); + case TIMESTAMPMILLITZ: + return (FieldReader) getTimeStampMilliTZ(); + case TIMESTAMPMICROTZ: + return (FieldReader) getTimeStampMicroTZ(); + case TIMESTAMPNANOTZ: + return (FieldReader) getTimeStampNanoTZ(); + case TIMEMICRO: + return (FieldReader) getTimeMicro(); + case TIMENANO: + return (FieldReader) getTimeNano(); + case INTERVALDAY: + return (FieldReader) getIntervalDay(); + case INTERVALMONTHDAYNANO: + return (FieldReader) getIntervalMonthDayNano(); + case DECIMAL256: + return (FieldReader) getDecimal256(); + case DECIMAL: + return (FieldReader) getDecimal(); + case FIXEDSIZEBINARY: + return (FieldReader) getFixedSizeBinary(); + case VARBINARY: + return (FieldReader) getVarBinary(); + case VARCHAR: + return (FieldReader) getVarChar(); + case LARGEVARCHAR: + return (FieldReader) getLargeVarChar(); + case LARGEVARBINARY: + return (FieldReader) getLargeVarBinary(); + case BIT: + return (FieldReader) getBit(); + default: + throw new UnsupportedOperationException("Unsupported type: " + MinorType.values()[typeValue]); + } + } + + private SingleStructReaderImpl structReader; + + private StructReader getStruct() { + if (structReader == null) { + structReader = (SingleStructReaderImpl) data.getStruct().getReader(); + structReader.setPosition(idx()); + readers[MinorType.STRUCT.ordinal()] = structReader; + } + return structReader; + } + + private UnionListReader listReader; + + private FieldReader getList() { + if (listReader == null) { + listReader = new UnionListReader(data.getList()); + listReader.setPosition(idx()); + readers[MinorType.LIST.ordinal()] = listReader; + } + return listReader; + } + + private UnionMapReader mapReader; + + private FieldReader getMap() { + if (mapReader == null) { + mapReader = new UnionMapReader(data.getMap()); + mapReader.setPosition(idx()); + 
readers[MinorType.MAP.ordinal()] = mapReader; + } + return mapReader; + } + + @Override + public java.util.Iterator iterator() { + return getStruct().iterator(); + } + + @Override + public void copyAsValue(UnionWriter writer) { + writer.data.copyFrom(idx(), writer.idx(), data); + } + + + @Override + public Object readObject() { + return getReaderForIndex(idx()).readObject(); + } + + + @Override + public BigDecimal readBigDecimal() { + return getReaderForIndex(idx()).readBigDecimal(); + } + + + @Override + public Short readShort() { + return getReaderForIndex(idx()).readShort(); + } + + + @Override + public Integer readInteger() { + return getReaderForIndex(idx()).readInteger(); + } + + + @Override + public Long readLong() { + return getReaderForIndex(idx()).readLong(); + } + + + @Override + public Boolean readBoolean() { + return getReaderForIndex(idx()).readBoolean(); + } + + + @Override + public LocalDateTime readLocalDateTime() { + return getReaderForIndex(idx()).readLocalDateTime(); + } + + + @Override + public Duration readDuration() { + return getReaderForIndex(idx()).readDuration(); + } + + + @Override + public Period readPeriod() { + return getReaderForIndex(idx()).readPeriod(); + } + + + @Override + public Double readDouble() { + return getReaderForIndex(idx()).readDouble(); + } + + + @Override + public Float readFloat() { + return getReaderForIndex(idx()).readFloat(); + } + + + @Override + public Character readCharacter() { + return getReaderForIndex(idx()).readCharacter(); + } + + + @Override + public Text readText() { + return getReaderForIndex(idx()).readText(); + } + + + @Override + public Byte readByte() { + return getReaderForIndex(idx()).readByte(); + } + + + @Override + public byte[] readByteArray() { + return getReaderForIndex(idx()).readByteArray(); + } + + + @Override + public PeriodDuration readPeriodDuration() { + return getReaderForIndex(idx()).readPeriodDuration(); + } + + + public int size() { + return getReaderForIndex(idx()).size(); + } 
+ + + private TinyIntReaderImpl tinyIntReader; + + private TinyIntReaderImpl getTinyInt() { + if (tinyIntReader == null) { + tinyIntReader = new TinyIntReaderImpl(data.getTinyIntVector()); + tinyIntReader.setPosition(idx()); + readers[MinorType.TINYINT.ordinal()] = tinyIntReader; + } + return tinyIntReader; + } + + public void read(NullableTinyIntHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TinyIntWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private UInt1ReaderImpl uInt1Reader; + + private UInt1ReaderImpl getUInt1() { + if (uInt1Reader == null) { + uInt1Reader = new UInt1ReaderImpl(data.getUInt1Vector()); + uInt1Reader.setPosition(idx()); + readers[MinorType.UINT1.ordinal()] = uInt1Reader; + } + return uInt1Reader; + } + + public void read(NullableUInt1Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(UInt1Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private UInt2ReaderImpl uInt2Reader; + + private UInt2ReaderImpl getUInt2() { + if (uInt2Reader == null) { + uInt2Reader = new UInt2ReaderImpl(data.getUInt2Vector()); + uInt2Reader.setPosition(idx()); + readers[MinorType.UINT2.ordinal()] = uInt2Reader; + } + return uInt2Reader; + } + + public void read(NullableUInt2Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(UInt2Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private SmallIntReaderImpl smallIntReader; + + private SmallIntReaderImpl getSmallInt() { + if (smallIntReader == null) { + smallIntReader = new SmallIntReaderImpl(data.getSmallIntVector()); + smallIntReader.setPosition(idx()); + readers[MinorType.SMALLINT.ordinal()] = smallIntReader; + } + return smallIntReader; + } + + public void read(NullableSmallIntHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(SmallIntWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); 
+ } + + private Float2ReaderImpl float2Reader; + + private Float2ReaderImpl getFloat2() { + if (float2Reader == null) { + float2Reader = new Float2ReaderImpl(data.getFloat2Vector()); + float2Reader.setPosition(idx()); + readers[MinorType.FLOAT2.ordinal()] = float2Reader; + } + return float2Reader; + } + + public void read(NullableFloat2Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(Float2Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private IntReaderImpl intReader; + + private IntReaderImpl getInt() { + if (intReader == null) { + intReader = new IntReaderImpl(data.getIntVector()); + intReader.setPosition(idx()); + readers[MinorType.INT.ordinal()] = intReader; + } + return intReader; + } + + public void read(NullableIntHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(IntWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private UInt4ReaderImpl uInt4Reader; + + private UInt4ReaderImpl getUInt4() { + if (uInt4Reader == null) { + uInt4Reader = new UInt4ReaderImpl(data.getUInt4Vector()); + uInt4Reader.setPosition(idx()); + readers[MinorType.UINT4.ordinal()] = uInt4Reader; + } + return uInt4Reader; + } + + public void read(NullableUInt4Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(UInt4Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private Float4ReaderImpl float4Reader; + + private Float4ReaderImpl getFloat4() { + if (float4Reader == null) { + float4Reader = new Float4ReaderImpl(data.getFloat4Vector()); + float4Reader.setPosition(idx()); + readers[MinorType.FLOAT4.ordinal()] = float4Reader; + } + return float4Reader; + } + + public void read(NullableFloat4Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(Float4Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private DateDayReaderImpl dateDayReader; + + private 
DateDayReaderImpl getDateDay() { + if (dateDayReader == null) { + dateDayReader = new DateDayReaderImpl(data.getDateDayVector()); + dateDayReader.setPosition(idx()); + readers[MinorType.DATEDAY.ordinal()] = dateDayReader; + } + return dateDayReader; + } + + public void read(NullableDateDayHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(DateDayWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private IntervalYearReaderImpl intervalYearReader; + + private IntervalYearReaderImpl getIntervalYear() { + if (intervalYearReader == null) { + intervalYearReader = new IntervalYearReaderImpl(data.getIntervalYearVector()); + intervalYearReader.setPosition(idx()); + readers[MinorType.INTERVALYEAR.ordinal()] = intervalYearReader; + } + return intervalYearReader; + } + + public void read(NullableIntervalYearHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(IntervalYearWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeSecReaderImpl timeSecReader; + + private TimeSecReaderImpl getTimeSec() { + if (timeSecReader == null) { + timeSecReader = new TimeSecReaderImpl(data.getTimeSecVector()); + timeSecReader.setPosition(idx()); + readers[MinorType.TIMESEC.ordinal()] = timeSecReader; + } + return timeSecReader; + } + + public void read(NullableTimeSecHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeSecWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeMilliReaderImpl timeMilliReader; + + private TimeMilliReaderImpl getTimeMilli() { + if (timeMilliReader == null) { + timeMilliReader = new TimeMilliReaderImpl(data.getTimeMilliVector()); + timeMilliReader.setPosition(idx()); + readers[MinorType.TIMEMILLI.ordinal()] = timeMilliReader; + } + return timeMilliReader; + } + + public void read(NullableTimeMilliHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void 
copyAsValue(TimeMilliWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private BigIntReaderImpl bigIntReader; + + private BigIntReaderImpl getBigInt() { + if (bigIntReader == null) { + bigIntReader = new BigIntReaderImpl(data.getBigIntVector()); + bigIntReader.setPosition(idx()); + readers[MinorType.BIGINT.ordinal()] = bigIntReader; + } + return bigIntReader; + } + + public void read(NullableBigIntHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(BigIntWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private UInt8ReaderImpl uInt8Reader; + + private UInt8ReaderImpl getUInt8() { + if (uInt8Reader == null) { + uInt8Reader = new UInt8ReaderImpl(data.getUInt8Vector()); + uInt8Reader.setPosition(idx()); + readers[MinorType.UINT8.ordinal()] = uInt8Reader; + } + return uInt8Reader; + } + + public void read(NullableUInt8Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(UInt8Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private Float8ReaderImpl float8Reader; + + private Float8ReaderImpl getFloat8() { + if (float8Reader == null) { + float8Reader = new Float8ReaderImpl(data.getFloat8Vector()); + float8Reader.setPosition(idx()); + readers[MinorType.FLOAT8.ordinal()] = float8Reader; + } + return float8Reader; + } + + public void read(NullableFloat8Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(Float8Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private DateMilliReaderImpl dateMilliReader; + + private DateMilliReaderImpl getDateMilli() { + if (dateMilliReader == null) { + dateMilliReader = new DateMilliReaderImpl(data.getDateMilliVector()); + dateMilliReader.setPosition(idx()); + readers[MinorType.DATEMILLI.ordinal()] = dateMilliReader; + } + return dateMilliReader; + } + + public void read(NullableDateMilliHolder holder){ + getReaderForIndex(idx()).read(holder); + } 
+ + public void copyAsValue(DateMilliWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private DurationReaderImpl durationReader; + + private DurationReaderImpl getDuration() { + if (durationReader == null) { + durationReader = new DurationReaderImpl(data.getDurationVector()); + durationReader.setPosition(idx()); + readers[MinorType.DURATION.ordinal()] = durationReader; + } + return durationReader; + } + + public void read(NullableDurationHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(DurationWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampSecReaderImpl timeStampSecReader; + + private TimeStampSecReaderImpl getTimeStampSec() { + if (timeStampSecReader == null) { + timeStampSecReader = new TimeStampSecReaderImpl(data.getTimeStampSecVector()); + timeStampSecReader.setPosition(idx()); + readers[MinorType.TIMESTAMPSEC.ordinal()] = timeStampSecReader; + } + return timeStampSecReader; + } + + public void read(NullableTimeStampSecHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampSecWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampMilliReaderImpl timeStampMilliReader; + + private TimeStampMilliReaderImpl getTimeStampMilli() { + if (timeStampMilliReader == null) { + timeStampMilliReader = new TimeStampMilliReaderImpl(data.getTimeStampMilliVector()); + timeStampMilliReader.setPosition(idx()); + readers[MinorType.TIMESTAMPMILLI.ordinal()] = timeStampMilliReader; + } + return timeStampMilliReader; + } + + public void read(NullableTimeStampMilliHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampMilliWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampMicroReaderImpl timeStampMicroReader; + + private TimeStampMicroReaderImpl getTimeStampMicro() { + if (timeStampMicroReader == null) { + timeStampMicroReader 
= new TimeStampMicroReaderImpl(data.getTimeStampMicroVector()); + timeStampMicroReader.setPosition(idx()); + readers[MinorType.TIMESTAMPMICRO.ordinal()] = timeStampMicroReader; + } + return timeStampMicroReader; + } + + public void read(NullableTimeStampMicroHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampMicroWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampNanoReaderImpl timeStampNanoReader; + + private TimeStampNanoReaderImpl getTimeStampNano() { + if (timeStampNanoReader == null) { + timeStampNanoReader = new TimeStampNanoReaderImpl(data.getTimeStampNanoVector()); + timeStampNanoReader.setPosition(idx()); + readers[MinorType.TIMESTAMPNANO.ordinal()] = timeStampNanoReader; + } + return timeStampNanoReader; + } + + public void read(NullableTimeStampNanoHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampNanoWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampSecTZReaderImpl timeStampSecTZReader; + + private TimeStampSecTZReaderImpl getTimeStampSecTZ() { + if (timeStampSecTZReader == null) { + timeStampSecTZReader = new TimeStampSecTZReaderImpl(data.getTimeStampSecTZVector()); + timeStampSecTZReader.setPosition(idx()); + readers[MinorType.TIMESTAMPSECTZ.ordinal()] = timeStampSecTZReader; + } + return timeStampSecTZReader; + } + + public void read(NullableTimeStampSecTZHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampSecTZWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampMilliTZReaderImpl timeStampMilliTZReader; + + private TimeStampMilliTZReaderImpl getTimeStampMilliTZ() { + if (timeStampMilliTZReader == null) { + timeStampMilliTZReader = new TimeStampMilliTZReaderImpl(data.getTimeStampMilliTZVector()); + timeStampMilliTZReader.setPosition(idx()); + readers[MinorType.TIMESTAMPMILLITZ.ordinal()] = 
timeStampMilliTZReader; + } + return timeStampMilliTZReader; + } + + public void read(NullableTimeStampMilliTZHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampMilliTZWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampMicroTZReaderImpl timeStampMicroTZReader; + + private TimeStampMicroTZReaderImpl getTimeStampMicroTZ() { + if (timeStampMicroTZReader == null) { + timeStampMicroTZReader = new TimeStampMicroTZReaderImpl(data.getTimeStampMicroTZVector()); + timeStampMicroTZReader.setPosition(idx()); + readers[MinorType.TIMESTAMPMICROTZ.ordinal()] = timeStampMicroTZReader; + } + return timeStampMicroTZReader; + } + + public void read(NullableTimeStampMicroTZHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampMicroTZWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeStampNanoTZReaderImpl timeStampNanoTZReader; + + private TimeStampNanoTZReaderImpl getTimeStampNanoTZ() { + if (timeStampNanoTZReader == null) { + timeStampNanoTZReader = new TimeStampNanoTZReaderImpl(data.getTimeStampNanoTZVector()); + timeStampNanoTZReader.setPosition(idx()); + readers[MinorType.TIMESTAMPNANOTZ.ordinal()] = timeStampNanoTZReader; + } + return timeStampNanoTZReader; + } + + public void read(NullableTimeStampNanoTZHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeStampNanoTZWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeMicroReaderImpl timeMicroReader; + + private TimeMicroReaderImpl getTimeMicro() { + if (timeMicroReader == null) { + timeMicroReader = new TimeMicroReaderImpl(data.getTimeMicroVector()); + timeMicroReader.setPosition(idx()); + readers[MinorType.TIMEMICRO.ordinal()] = timeMicroReader; + } + return timeMicroReader; + } + + public void read(NullableTimeMicroHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void 
copyAsValue(TimeMicroWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private TimeNanoReaderImpl timeNanoReader; + + private TimeNanoReaderImpl getTimeNano() { + if (timeNanoReader == null) { + timeNanoReader = new TimeNanoReaderImpl(data.getTimeNanoVector()); + timeNanoReader.setPosition(idx()); + readers[MinorType.TIMENANO.ordinal()] = timeNanoReader; + } + return timeNanoReader; + } + + public void read(NullableTimeNanoHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(TimeNanoWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private IntervalDayReaderImpl intervalDayReader; + + private IntervalDayReaderImpl getIntervalDay() { + if (intervalDayReader == null) { + intervalDayReader = new IntervalDayReaderImpl(data.getIntervalDayVector()); + intervalDayReader.setPosition(idx()); + readers[MinorType.INTERVALDAY.ordinal()] = intervalDayReader; + } + return intervalDayReader; + } + + public void read(NullableIntervalDayHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(IntervalDayWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private IntervalMonthDayNanoReaderImpl intervalMonthDayNanoReader; + + private IntervalMonthDayNanoReaderImpl getIntervalMonthDayNano() { + if (intervalMonthDayNanoReader == null) { + intervalMonthDayNanoReader = new IntervalMonthDayNanoReaderImpl(data.getIntervalMonthDayNanoVector()); + intervalMonthDayNanoReader.setPosition(idx()); + readers[MinorType.INTERVALMONTHDAYNANO.ordinal()] = intervalMonthDayNanoReader; + } + return intervalMonthDayNanoReader; + } + + public void read(NullableIntervalMonthDayNanoHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(IntervalMonthDayNanoWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private Decimal256ReaderImpl decimal256Reader; + + private Decimal256ReaderImpl getDecimal256() { + if (decimal256Reader 
== null) { + decimal256Reader = new Decimal256ReaderImpl(data.getDecimal256Vector()); + decimal256Reader.setPosition(idx()); + readers[MinorType.DECIMAL256.ordinal()] = decimal256Reader; + } + return decimal256Reader; + } + + public void read(NullableDecimal256Holder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(Decimal256Writer writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private DecimalReaderImpl decimalReader; + + private DecimalReaderImpl getDecimal() { + if (decimalReader == null) { + decimalReader = new DecimalReaderImpl(data.getDecimalVector()); + decimalReader.setPosition(idx()); + readers[MinorType.DECIMAL.ordinal()] = decimalReader; + } + return decimalReader; + } + + public void read(NullableDecimalHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(DecimalWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private FixedSizeBinaryReaderImpl fixedSizeBinaryReader; + + private FixedSizeBinaryReaderImpl getFixedSizeBinary() { + if (fixedSizeBinaryReader == null) { + fixedSizeBinaryReader = new FixedSizeBinaryReaderImpl(data.getFixedSizeBinaryVector()); + fixedSizeBinaryReader.setPosition(idx()); + readers[MinorType.FIXEDSIZEBINARY.ordinal()] = fixedSizeBinaryReader; + } + return fixedSizeBinaryReader; + } + + public void read(NullableFixedSizeBinaryHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(FixedSizeBinaryWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private VarBinaryReaderImpl varBinaryReader; + + private VarBinaryReaderImpl getVarBinary() { + if (varBinaryReader == null) { + varBinaryReader = new VarBinaryReaderImpl(data.getVarBinaryVector()); + varBinaryReader.setPosition(idx()); + readers[MinorType.VARBINARY.ordinal()] = varBinaryReader; + } + return varBinaryReader; + } + + public void read(NullableVarBinaryHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + 
public void copyAsValue(VarBinaryWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private VarCharReaderImpl varCharReader; + + private VarCharReaderImpl getVarChar() { + if (varCharReader == null) { + varCharReader = new VarCharReaderImpl(data.getVarCharVector()); + varCharReader.setPosition(idx()); + readers[MinorType.VARCHAR.ordinal()] = varCharReader; + } + return varCharReader; + } + + public void read(NullableVarCharHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(VarCharWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private LargeVarCharReaderImpl largeVarCharReader; + + private LargeVarCharReaderImpl getLargeVarChar() { + if (largeVarCharReader == null) { + largeVarCharReader = new LargeVarCharReaderImpl(data.getLargeVarCharVector()); + largeVarCharReader.setPosition(idx()); + readers[MinorType.LARGEVARCHAR.ordinal()] = largeVarCharReader; + } + return largeVarCharReader; + } + + public void read(NullableLargeVarCharHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(LargeVarCharWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private LargeVarBinaryReaderImpl largeVarBinaryReader; + + private LargeVarBinaryReaderImpl getLargeVarBinary() { + if (largeVarBinaryReader == null) { + largeVarBinaryReader = new LargeVarBinaryReaderImpl(data.getLargeVarBinaryVector()); + largeVarBinaryReader.setPosition(idx()); + readers[MinorType.LARGEVARBINARY.ordinal()] = largeVarBinaryReader; + } + return largeVarBinaryReader; + } + + public void read(NullableLargeVarBinaryHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(LargeVarBinaryWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + private BitReaderImpl bitReader; + + private BitReaderImpl getBit() { + if (bitReader == null) { + bitReader = new BitReaderImpl(data.getBitVector()); + bitReader.setPosition(idx()); + 
readers[MinorType.BIT.ordinal()] = bitReader; + } + return bitReader; + } + + public void read(NullableBitHolder holder){ + getReaderForIndex(idx()).read(holder); + } + + public void copyAsValue(BitWriter writer){ + getReaderForIndex(idx()).copyAsValue(writer); + } + + @Override + public void copyAsValue(ListWriter writer) { + ComplexCopier.copy(this, (FieldWriter) writer); + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + for (BaseReader reader : readers) { + if (reader != null) { + reader.setPosition(index); + } + } + } + + public FieldReader reader(String name){ + return getStruct().reader(name); + } + + public FieldReader reader() { + return getList().reader(); + } + + public boolean next() { + return getReaderForIndex(idx()).next(); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionWriter.java new file mode 100644 index 000000000000..979bb1b54bbe --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/UnionWriter.java @@ -0,0 +1,2098 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +import org.apache.arrow.vector.complex.writer.BaseWriter; +import org.apache.arrow.vector.types.Types.MinorType; + + + +/* + * This class is generated using freemarker and the UnionWriter.java template. 
+ */ +@SuppressWarnings("unused") +public class UnionWriter extends AbstractFieldWriter implements FieldWriter { + + UnionVector data; + private StructWriter structWriter; + private UnionListWriter listWriter; + private UnionMapWriter mapWriter; + private List writers = new java.util.ArrayList<>(); + private final NullableStructWriterFactory nullableStructWriterFactory; + + public UnionWriter(UnionVector vector) { + this(vector, NullableStructWriterFactory.getNullableStructWriterFactoryInstance()); + } + + public UnionWriter(UnionVector vector, NullableStructWriterFactory nullableStructWriterFactory) { + data = vector; + this.nullableStructWriterFactory = nullableStructWriterFactory; + } + + @Override + public void setPosition(int index) { + super.setPosition(index); + for (BaseWriter writer : writers) { + writer.setPosition(index); + } + } + + + @Override + public void start() { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().start(); + } + + @Override + public void end() { + getStructWriter().end(); + } + + @Override + public void startList() { + getListWriter().startList(); + data.setType(idx(), MinorType.LIST); + } + + @Override + public void endList() { + getListWriter().endList(); + } + + @Override + public void startMap() { + getMapWriter().startMap(); + data.setType(idx(), MinorType.MAP); + } + + @Override + public void endMap() { + getMapWriter().endMap(); + } + + @Override + public void startEntry() { + getMapWriter().startEntry(); + } + + @Override + public MapWriter key() { + return getMapWriter().key(); + } + + @Override + public MapWriter value() { + return getMapWriter().value(); + } + + @Override + public void endEntry() { + getMapWriter().endEntry(); + } + + private StructWriter getStructWriter() { + if (structWriter == null) { + structWriter = nullableStructWriterFactory.build(data.getStruct()); + structWriter.setPosition(idx()); + writers.add(structWriter); + } + return structWriter; + } + + public StructWriter asStruct() { + 
data.setType(idx(), MinorType.STRUCT); + return getStructWriter(); + } + + private ListWriter getListWriter() { + if (listWriter == null) { + listWriter = new UnionListWriter(data.getList(), nullableStructWriterFactory); + listWriter.setPosition(idx()); + writers.add(listWriter); + } + return listWriter; + } + + public ListWriter asList() { + data.setType(idx(), MinorType.LIST); + return getListWriter(); + } + + private MapWriter getMapWriter() { + if (mapWriter == null) { + mapWriter = new UnionMapWriter(data.getMap(new ArrowType.Map(false))); + mapWriter.setPosition(idx()); + writers.add(mapWriter); + } + return mapWriter; + } + + private MapWriter getMapWriter(ArrowType arrowType) { + if (mapWriter == null) { + mapWriter = new UnionMapWriter(data.getMap(arrowType)); + mapWriter.setPosition(idx()); + writers.add(mapWriter); + } + return mapWriter; + } + + public MapWriter asMap(ArrowType arrowType) { + data.setType(idx(), MinorType.MAP); + return getMapWriter(arrowType); + } + + BaseWriter getWriter(MinorType minorType) { + return getWriter(minorType, null); + } + + BaseWriter getWriter(MinorType minorType, ArrowType arrowType) { + switch (minorType) { + case STRUCT: + return getStructWriter(); + case LIST: + return getListWriter(); + case MAP: + return getMapWriter(arrowType); + case TINYINT: + return getTinyIntWriter(); + case UINT1: + return getUInt1Writer(); + case UINT2: + return getUInt2Writer(); + case SMALLINT: + return getSmallIntWriter(); + case FLOAT2: + return getFloat2Writer(); + case INT: + return getIntWriter(); + case UINT4: + return getUInt4Writer(); + case FLOAT4: + return getFloat4Writer(); + case DATEDAY: + return getDateDayWriter(); + case INTERVALYEAR: + return getIntervalYearWriter(); + case TIMESEC: + return getTimeSecWriter(); + case TIMEMILLI: + return getTimeMilliWriter(); + case BIGINT: + return getBigIntWriter(); + case UINT8: + return getUInt8Writer(); + case FLOAT8: + return getFloat8Writer(); + case DATEMILLI: + return 
getDateMilliWriter(); + case DURATION: + return getDurationWriter(arrowType); + case TIMESTAMPSEC: + return getTimeStampSecWriter(); + case TIMESTAMPMILLI: + return getTimeStampMilliWriter(); + case TIMESTAMPMICRO: + return getTimeStampMicroWriter(); + case TIMESTAMPNANO: + return getTimeStampNanoWriter(); + case TIMESTAMPSECTZ: + return getTimeStampSecTZWriter(arrowType); + case TIMESTAMPMILLITZ: + return getTimeStampMilliTZWriter(arrowType); + case TIMESTAMPMICROTZ: + return getTimeStampMicroTZWriter(arrowType); + case TIMESTAMPNANOTZ: + return getTimeStampNanoTZWriter(arrowType); + case TIMEMICRO: + return getTimeMicroWriter(); + case TIMENANO: + return getTimeNanoWriter(); + case INTERVALDAY: + return getIntervalDayWriter(); + case INTERVALMONTHDAYNANO: + return getIntervalMonthDayNanoWriter(); + case DECIMAL256: + return getDecimal256Writer(arrowType); + case DECIMAL: + return getDecimalWriter(arrowType); + case FIXEDSIZEBINARY: + return getFixedSizeBinaryWriter(arrowType); + case VARBINARY: + return getVarBinaryWriter(); + case VARCHAR: + return getVarCharWriter(); + case LARGEVARCHAR: + return getLargeVarCharWriter(); + case LARGEVARBINARY: + return getLargeVarBinaryWriter(); + case BIT: + return getBitWriter(); + default: + throw new UnsupportedOperationException("Unknown type: " + minorType); + } + } + + private TinyIntWriter tinyIntWriter; + + private TinyIntWriter getTinyIntWriter() { + if (tinyIntWriter == null) { + tinyIntWriter = new TinyIntWriterImpl(data.getTinyIntVector()); + tinyIntWriter.setPosition(idx()); + writers.add(tinyIntWriter); + } + return tinyIntWriter; + } + + public TinyIntWriter asTinyInt() { + data.setType(idx(), MinorType.TINYINT); + return getTinyIntWriter(); + } + + @Override + public void write(TinyIntHolder holder) { + data.setType(idx(), MinorType.TINYINT); + getTinyIntWriter().setPosition(idx()); + getTinyIntWriter().writeTinyInt(holder.value); + } + + public void writeTinyInt(byte value) { + data.setType(idx(), 
MinorType.TINYINT); + getTinyIntWriter().setPosition(idx()); + getTinyIntWriter().writeTinyInt(value); + } + + private UInt1Writer uInt1Writer; + + private UInt1Writer getUInt1Writer() { + if (uInt1Writer == null) { + uInt1Writer = new UInt1WriterImpl(data.getUInt1Vector()); + uInt1Writer.setPosition(idx()); + writers.add(uInt1Writer); + } + return uInt1Writer; + } + + public UInt1Writer asUInt1() { + data.setType(idx(), MinorType.UINT1); + return getUInt1Writer(); + } + + @Override + public void write(UInt1Holder holder) { + data.setType(idx(), MinorType.UINT1); + getUInt1Writer().setPosition(idx()); + getUInt1Writer().writeUInt1(holder.value); + } + + public void writeUInt1(byte value) { + data.setType(idx(), MinorType.UINT1); + getUInt1Writer().setPosition(idx()); + getUInt1Writer().writeUInt1(value); + } + + private UInt2Writer uInt2Writer; + + private UInt2Writer getUInt2Writer() { + if (uInt2Writer == null) { + uInt2Writer = new UInt2WriterImpl(data.getUInt2Vector()); + uInt2Writer.setPosition(idx()); + writers.add(uInt2Writer); + } + return uInt2Writer; + } + + public UInt2Writer asUInt2() { + data.setType(idx(), MinorType.UINT2); + return getUInt2Writer(); + } + + @Override + public void write(UInt2Holder holder) { + data.setType(idx(), MinorType.UINT2); + getUInt2Writer().setPosition(idx()); + getUInt2Writer().writeUInt2(holder.value); + } + + public void writeUInt2(char value) { + data.setType(idx(), MinorType.UINT2); + getUInt2Writer().setPosition(idx()); + getUInt2Writer().writeUInt2(value); + } + + private SmallIntWriter smallIntWriter; + + private SmallIntWriter getSmallIntWriter() { + if (smallIntWriter == null) { + smallIntWriter = new SmallIntWriterImpl(data.getSmallIntVector()); + smallIntWriter.setPosition(idx()); + writers.add(smallIntWriter); + } + return smallIntWriter; + } + + public SmallIntWriter asSmallInt() { + data.setType(idx(), MinorType.SMALLINT); + return getSmallIntWriter(); + } + + @Override + public void write(SmallIntHolder 
holder) { + data.setType(idx(), MinorType.SMALLINT); + getSmallIntWriter().setPosition(idx()); + getSmallIntWriter().writeSmallInt(holder.value); + } + + public void writeSmallInt(short value) { + data.setType(idx(), MinorType.SMALLINT); + getSmallIntWriter().setPosition(idx()); + getSmallIntWriter().writeSmallInt(value); + } + + private Float2Writer float2Writer; + + private Float2Writer getFloat2Writer() { + if (float2Writer == null) { + float2Writer = new Float2WriterImpl(data.getFloat2Vector()); + float2Writer.setPosition(idx()); + writers.add(float2Writer); + } + return float2Writer; + } + + public Float2Writer asFloat2() { + data.setType(idx(), MinorType.FLOAT2); + return getFloat2Writer(); + } + + @Override + public void write(Float2Holder holder) { + data.setType(idx(), MinorType.FLOAT2); + getFloat2Writer().setPosition(idx()); + getFloat2Writer().writeFloat2(holder.value); + } + + public void writeFloat2(short value) { + data.setType(idx(), MinorType.FLOAT2); + getFloat2Writer().setPosition(idx()); + getFloat2Writer().writeFloat2(value); + } + + private IntWriter intWriter; + + private IntWriter getIntWriter() { + if (intWriter == null) { + intWriter = new IntWriterImpl(data.getIntVector()); + intWriter.setPosition(idx()); + writers.add(intWriter); + } + return intWriter; + } + + public IntWriter asInt() { + data.setType(idx(), MinorType.INT); + return getIntWriter(); + } + + @Override + public void write(IntHolder holder) { + data.setType(idx(), MinorType.INT); + getIntWriter().setPosition(idx()); + getIntWriter().writeInt(holder.value); + } + + public void writeInt(int value) { + data.setType(idx(), MinorType.INT); + getIntWriter().setPosition(idx()); + getIntWriter().writeInt(value); + } + + private UInt4Writer uInt4Writer; + + private UInt4Writer getUInt4Writer() { + if (uInt4Writer == null) { + uInt4Writer = new UInt4WriterImpl(data.getUInt4Vector()); + uInt4Writer.setPosition(idx()); + writers.add(uInt4Writer); + } + return uInt4Writer; + } + + 
public UInt4Writer asUInt4() { + data.setType(idx(), MinorType.UINT4); + return getUInt4Writer(); + } + + @Override + public void write(UInt4Holder holder) { + data.setType(idx(), MinorType.UINT4); + getUInt4Writer().setPosition(idx()); + getUInt4Writer().writeUInt4(holder.value); + } + + public void writeUInt4(int value) { + data.setType(idx(), MinorType.UINT4); + getUInt4Writer().setPosition(idx()); + getUInt4Writer().writeUInt4(value); + } + + private Float4Writer float4Writer; + + private Float4Writer getFloat4Writer() { + if (float4Writer == null) { + float4Writer = new Float4WriterImpl(data.getFloat4Vector()); + float4Writer.setPosition(idx()); + writers.add(float4Writer); + } + return float4Writer; + } + + public Float4Writer asFloat4() { + data.setType(idx(), MinorType.FLOAT4); + return getFloat4Writer(); + } + + @Override + public void write(Float4Holder holder) { + data.setType(idx(), MinorType.FLOAT4); + getFloat4Writer().setPosition(idx()); + getFloat4Writer().writeFloat4(holder.value); + } + + public void writeFloat4(float value) { + data.setType(idx(), MinorType.FLOAT4); + getFloat4Writer().setPosition(idx()); + getFloat4Writer().writeFloat4(value); + } + + private DateDayWriter dateDayWriter; + + private DateDayWriter getDateDayWriter() { + if (dateDayWriter == null) { + dateDayWriter = new DateDayWriterImpl(data.getDateDayVector()); + dateDayWriter.setPosition(idx()); + writers.add(dateDayWriter); + } + return dateDayWriter; + } + + public DateDayWriter asDateDay() { + data.setType(idx(), MinorType.DATEDAY); + return getDateDayWriter(); + } + + @Override + public void write(DateDayHolder holder) { + data.setType(idx(), MinorType.DATEDAY); + getDateDayWriter().setPosition(idx()); + getDateDayWriter().writeDateDay(holder.value); + } + + public void writeDateDay(int value) { + data.setType(idx(), MinorType.DATEDAY); + getDateDayWriter().setPosition(idx()); + getDateDayWriter().writeDateDay(value); + } + + private IntervalYearWriter intervalYearWriter; 
+ + private IntervalYearWriter getIntervalYearWriter() { + if (intervalYearWriter == null) { + intervalYearWriter = new IntervalYearWriterImpl(data.getIntervalYearVector()); + intervalYearWriter.setPosition(idx()); + writers.add(intervalYearWriter); + } + return intervalYearWriter; + } + + public IntervalYearWriter asIntervalYear() { + data.setType(idx(), MinorType.INTERVALYEAR); + return getIntervalYearWriter(); + } + + @Override + public void write(IntervalYearHolder holder) { + data.setType(idx(), MinorType.INTERVALYEAR); + getIntervalYearWriter().setPosition(idx()); + getIntervalYearWriter().writeIntervalYear(holder.value); + } + + public void writeIntervalYear(int value) { + data.setType(idx(), MinorType.INTERVALYEAR); + getIntervalYearWriter().setPosition(idx()); + getIntervalYearWriter().writeIntervalYear(value); + } + + private TimeSecWriter timeSecWriter; + + private TimeSecWriter getTimeSecWriter() { + if (timeSecWriter == null) { + timeSecWriter = new TimeSecWriterImpl(data.getTimeSecVector()); + timeSecWriter.setPosition(idx()); + writers.add(timeSecWriter); + } + return timeSecWriter; + } + + public TimeSecWriter asTimeSec() { + data.setType(idx(), MinorType.TIMESEC); + return getTimeSecWriter(); + } + + @Override + public void write(TimeSecHolder holder) { + data.setType(idx(), MinorType.TIMESEC); + getTimeSecWriter().setPosition(idx()); + getTimeSecWriter().writeTimeSec(holder.value); + } + + public void writeTimeSec(int value) { + data.setType(idx(), MinorType.TIMESEC); + getTimeSecWriter().setPosition(idx()); + getTimeSecWriter().writeTimeSec(value); + } + + private TimeMilliWriter timeMilliWriter; + + private TimeMilliWriter getTimeMilliWriter() { + if (timeMilliWriter == null) { + timeMilliWriter = new TimeMilliWriterImpl(data.getTimeMilliVector()); + timeMilliWriter.setPosition(idx()); + writers.add(timeMilliWriter); + } + return timeMilliWriter; + } + + public TimeMilliWriter asTimeMilli() { + data.setType(idx(), MinorType.TIMEMILLI); + return 
getTimeMilliWriter(); + } + + @Override + public void write(TimeMilliHolder holder) { + data.setType(idx(), MinorType.TIMEMILLI); + getTimeMilliWriter().setPosition(idx()); + getTimeMilliWriter().writeTimeMilli(holder.value); + } + + public void writeTimeMilli(int value) { + data.setType(idx(), MinorType.TIMEMILLI); + getTimeMilliWriter().setPosition(idx()); + getTimeMilliWriter().writeTimeMilli(value); + } + + private BigIntWriter bigIntWriter; + + private BigIntWriter getBigIntWriter() { + if (bigIntWriter == null) { + bigIntWriter = new BigIntWriterImpl(data.getBigIntVector()); + bigIntWriter.setPosition(idx()); + writers.add(bigIntWriter); + } + return bigIntWriter; + } + + public BigIntWriter asBigInt() { + data.setType(idx(), MinorType.BIGINT); + return getBigIntWriter(); + } + + @Override + public void write(BigIntHolder holder) { + data.setType(idx(), MinorType.BIGINT); + getBigIntWriter().setPosition(idx()); + getBigIntWriter().writeBigInt(holder.value); + } + + public void writeBigInt(long value) { + data.setType(idx(), MinorType.BIGINT); + getBigIntWriter().setPosition(idx()); + getBigIntWriter().writeBigInt(value); + } + + private UInt8Writer uInt8Writer; + + private UInt8Writer getUInt8Writer() { + if (uInt8Writer == null) { + uInt8Writer = new UInt8WriterImpl(data.getUInt8Vector()); + uInt8Writer.setPosition(idx()); + writers.add(uInt8Writer); + } + return uInt8Writer; + } + + public UInt8Writer asUInt8() { + data.setType(idx(), MinorType.UINT8); + return getUInt8Writer(); + } + + @Override + public void write(UInt8Holder holder) { + data.setType(idx(), MinorType.UINT8); + getUInt8Writer().setPosition(idx()); + getUInt8Writer().writeUInt8(holder.value); + } + + public void writeUInt8(long value) { + data.setType(idx(), MinorType.UINT8); + getUInt8Writer().setPosition(idx()); + getUInt8Writer().writeUInt8(value); + } + + private Float8Writer float8Writer; + + private Float8Writer getFloat8Writer() { + if (float8Writer == null) { + float8Writer = new 
Float8WriterImpl(data.getFloat8Vector());
    // (continuation of getFloat8Writer(): lazily create, position, and register the delegate)
    float8Writer.setPosition(idx());
    writers.add(float8Writer);
  }
  return float8Writer;
}

/** Tags the current union slot as FLOAT8 and returns the positioned float8 writer. */
public Float8Writer asFloat8() {
  data.setType(idx(), MinorType.FLOAT8);
  return getFloat8Writer();
}

@Override
public void write(Float8Holder holder) {
  data.setType(idx(), MinorType.FLOAT8);
  getFloat8Writer().setPosition(idx());
  getFloat8Writer().writeFloat8(holder.value);
}

public void writeFloat8(double value) {
  data.setType(idx(), MinorType.FLOAT8);
  getFloat8Writer().setPosition(idx());
  getFloat8Writer().writeFloat8(value);
}

// Lazily-created delegate writer for DATEMILLI values.
private DateMilliWriter dateMilliWriter;

/** Returns the DATEMILLI delegate, creating and registering it on first use. */
private DateMilliWriter getDateMilliWriter() {
  if (dateMilliWriter == null) {
    dateMilliWriter = new DateMilliWriterImpl(data.getDateMilliVector());
    dateMilliWriter.setPosition(idx());
    writers.add(dateMilliWriter);
  }
  return dateMilliWriter;
}

public DateMilliWriter asDateMilli() {
  data.setType(idx(), MinorType.DATEMILLI);
  return getDateMilliWriter();
}

@Override
public void write(DateMilliHolder holder) {
  data.setType(idx(), MinorType.DATEMILLI);
  getDateMilliWriter().setPosition(idx());
  getDateMilliWriter().writeDateMilli(holder.value);
}

public void writeDateMilli(long value) {
  data.setType(idx(), MinorType.DATEMILLI);
  getDateMilliWriter().setPosition(idx());
  getDateMilliWriter().writeDateMilli(value);
}

private DurationWriter durationWriter;

// Duration needs the concrete ArrowType (its time unit) to locate/create the child vector.
private DurationWriter getDurationWriter(ArrowType arrowType) {
  if (durationWriter == null) {
    durationWriter = new DurationWriterImpl(data.getDurationVector(arrowType));
    durationWriter.setPosition(idx());
    writers.add(durationWriter);
  }
  return durationWriter;
}

public DurationWriter asDuration(ArrowType arrowType) {
  data.setType(idx(), MinorType.DURATION);
  return getDurationWriter(arrowType);
}

@Override
public void write(DurationHolder holder) {
  data.setType(idx(), MinorType.DURATION);
  // The holder carries the time unit, so a precise Duration type can be built here.
  ArrowType arrowType = new ArrowType.Duration(holder.unit);
  getDurationWriter(arrowType).setPosition(idx());
  getDurationWriter(arrowType).write(holder);
}

public void writeDuration(long value) {
  data.setType(idx(), MinorType.DURATION);
  // This is expected to throw. There's nothing more that we can do here since we can't infer any
  // sort of default unit for the Duration or a default width for the FixedSizeBinary types.
  ArrowType arrowType = MinorType.DURATION.getType();
  getDurationWriter(arrowType).setPosition(idx());
  getDurationWriter(arrowType).writeDuration(value);
}

private TimeStampSecWriter timeStampSecWriter;

private TimeStampSecWriter getTimeStampSecWriter() {
  if (timeStampSecWriter == null) {
    timeStampSecWriter = new TimeStampSecWriterImpl(data.getTimeStampSecVector());
    timeStampSecWriter.setPosition(idx());
    writers.add(timeStampSecWriter);
  }
  return timeStampSecWriter;
}

public TimeStampSecWriter asTimeStampSec() {
  data.setType(idx(), MinorType.TIMESTAMPSEC);
  return getTimeStampSecWriter();
}

@Override
public void write(TimeStampSecHolder holder) {
  data.setType(idx(), MinorType.TIMESTAMPSEC);
  getTimeStampSecWriter().setPosition(idx());
  getTimeStampSecWriter().writeTimeStampSec(holder.value);
}

public void writeTimeStampSec(long value) {
  data.setType(idx(), MinorType.TIMESTAMPSEC);
  getTimeStampSecWriter().setPosition(idx());
  getTimeStampSecWriter().writeTimeStampSec(value);
}

private TimeStampMilliWriter timeStampMilliWriter;

private TimeStampMilliWriter getTimeStampMilliWriter() {
  if (timeStampMilliWriter == null) {
    timeStampMilliWriter = new TimeStampMilliWriterImpl(data.getTimeStampMilliVector());
    timeStampMilliWriter.setPosition(idx());
    writers.add(timeStampMilliWriter);
  }
  return timeStampMilliWriter;
}

public TimeStampMilliWriter asTimeStampMilli() {
  data.setType(idx(), MinorType.TIMESTAMPMILLI);
  return getTimeStampMilliWriter();
}

@Override
public void write(TimeStampMilliHolder holder) {
  data.setType(idx(), MinorType.TIMESTAMPMILLI);
  getTimeStampMilliWriter().setPosition(idx());
  getTimeStampMilliWriter().writeTimeStampMilli(holder.value);
}

public void writeTimeStampMilli(long value) {
  data.setType(idx(), MinorType.TIMESTAMPMILLI);
  getTimeStampMilliWriter().setPosition(idx());
  getTimeStampMilliWriter().writeTimeStampMilli(value);
}

private TimeStampMicroWriter timeStampMicroWriter;

private TimeStampMicroWriter getTimeStampMicroWriter() {
  if (timeStampMicroWriter == null) {
    timeStampMicroWriter = new TimeStampMicroWriterImpl(data.getTimeStampMicroVector());
    timeStampMicroWriter.setPosition(idx());
    writers.add(timeStampMicroWriter);
  }
  return timeStampMicroWriter;
}

public TimeStampMicroWriter asTimeStampMicro() {
  data.setType(idx(), MinorType.TIMESTAMPMICRO);
  return getTimeStampMicroWriter();
}

@Override
public void write(TimeStampMicroHolder holder) {
  data.setType(idx(), MinorType.TIMESTAMPMICRO);
  getTimeStampMicroWriter().setPosition(idx());
  getTimeStampMicroWriter().writeTimeStampMicro(holder.value);
}

public void writeTimeStampMicro(long value) {
  data.setType(idx(), MinorType.TIMESTAMPMICRO);
  getTimeStampMicroWriter().setPosition(idx());
  getTimeStampMicroWriter().writeTimeStampMicro(value);
}

private TimeStampNanoWriter timeStampNanoWriter;

private TimeStampNanoWriter getTimeStampNanoWriter() {
  if (timeStampNanoWriter == null) {
    timeStampNanoWriter = new TimeStampNanoWriterImpl(data.getTimeStampNanoVector());
    timeStampNanoWriter.setPosition(idx());
    writers.add(timeStampNanoWriter);
  }
  return timeStampNanoWriter;
}

public TimeStampNanoWriter asTimeStampNano() {
  data.setType(idx(), MinorType.TIMESTAMPNANO);
  return getTimeStampNanoWriter();
}

@Override
public void write(TimeStampNanoHolder holder) {
  data.setType(idx(), MinorType.TIMESTAMPNANO);
  getTimeStampNanoWriter().setPosition(idx());
  getTimeStampNanoWriter().writeTimeStampNano(holder.value);
}

public void writeTimeStampNano(long value) {
  data.setType(idx(), MinorType.TIMESTAMPNANO);
  getTimeStampNanoWriter().setPosition(idx());
  getTimeStampNanoWriter().writeTimeStampNano(value);
}

private TimeStampSecTZWriter timeStampSecTZWriter;

private TimeStampSecTZWriter getTimeStampSecTZWriter(ArrowType arrowType) {
  if (timeStampSecTZWriter == null) {
    timeStampSecTZWriter = new TimeStampSecTZWriterImpl(data.getTimeStampSecTZVector(arrowType));
    timeStampSecTZWriter.setPosition(idx());
    writers.add(timeStampSecTZWriter);
  }
  return timeStampSecTZWriter;
}

public TimeStampSecTZWriter asTimeStampSecTZ(ArrowType arrowType) {
  data.setType(idx(), MinorType.TIMESTAMPSECTZ);
  return getTimeStampSecTZWriter(arrowType);
}

@Override
public void write(TimeStampSecTZHolder holder) {
  data.setType(idx(), MinorType.TIMESTAMPSECTZ);
  // Rebuild the full type from the unit of the TZ-less variant plus the holder's timezone.
  ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPSEC.getType();
  ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone);
  getTimeStampSecTZWriter(arrowType).setPosition(idx());
  getTimeStampSecTZWriter(arrowType).write(holder);
}

public void writeTimeStampSecTZ(long value) {
  data.setType(idx(), MinorType.TIMESTAMPSECTZ);
  // No timezone is available from a bare long, so "UTC" is used.
  ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPSEC.getType();
  ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC");
  getTimeStampSecTZWriter(arrowType).setPosition(idx());
  getTimeStampSecTZWriter(arrowType).writeTimeStampSecTZ(value);
}

private TimeStampMilliTZWriter timeStampMilliTZWriter;

private TimeStampMilliTZWriter getTimeStampMilliTZWriter(ArrowType arrowType) {
  if (timeStampMilliTZWriter == null) {
    timeStampMilliTZWriter = new TimeStampMilliTZWriterImpl(data.getTimeStampMilliTZVector(arrowType));
    timeStampMilliTZWriter.setPosition(idx());
    writers.add(timeStampMilliTZWriter);
  }
  return timeStampMilliTZWriter;
}

public TimeStampMilliTZWriter asTimeStampMilliTZ(ArrowType arrowType) {
  data.setType(idx(), MinorType.TIMESTAMPMILLITZ);
  return getTimeStampMilliTZWriter(arrowType);
}

@Override
public void write(TimeStampMilliTZHolder holder) {
  data.setType(idx(), MinorType.TIMESTAMPMILLITZ);
  ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPMILLI.getType();
  ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone);
  getTimeStampMilliTZWriter(arrowType).setPosition(idx());
  getTimeStampMilliTZWriter(arrowType).write(holder);
}

public void writeTimeStampMilliTZ(long value) {
  data.setType(idx(), MinorType.TIMESTAMPMILLITZ);
  ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPMILLI.getType();
  ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC");
  getTimeStampMilliTZWriter(arrowType).setPosition(idx());
  getTimeStampMilliTZWriter(arrowType).writeTimeStampMilliTZ(value);
}

private TimeStampMicroTZWriter timeStampMicroTZWriter;

private TimeStampMicroTZWriter getTimeStampMicroTZWriter(ArrowType arrowType) {
  if (timeStampMicroTZWriter == null) {
    timeStampMicroTZWriter = new TimeStampMicroTZWriterImpl(data.getTimeStampMicroTZVector(arrowType));
    timeStampMicroTZWriter.setPosition(idx());
    writers.add(timeStampMicroTZWriter);
  }
  return timeStampMicroTZWriter;
}

public TimeStampMicroTZWriter asTimeStampMicroTZ(ArrowType arrowType) {
  data.setType(idx(), MinorType.TIMESTAMPMICROTZ);
  return getTimeStampMicroTZWriter(arrowType);
}

@Override
public void write(TimeStampMicroTZHolder holder) {
  data.setType(idx(), MinorType.TIMESTAMPMICROTZ);
  ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPMICRO.getType();
  ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone);
  getTimeStampMicroTZWriter(arrowType).setPosition(idx());
  getTimeStampMicroTZWriter(arrowType).write(holder);
}

public void writeTimeStampMicroTZ(long value) {
  data.setType(idx(), MinorType.TIMESTAMPMICROTZ);
  ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPMICRO.getType();
  ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC");
  getTimeStampMicroTZWriter(arrowType).setPosition(idx());
  getTimeStampMicroTZWriter(arrowType).writeTimeStampMicroTZ(value);
}

private TimeStampNanoTZWriter timeStampNanoTZWriter;

private TimeStampNanoTZWriter getTimeStampNanoTZWriter(ArrowType arrowType) {
  if (timeStampNanoTZWriter == null) {
    timeStampNanoTZWriter = new TimeStampNanoTZWriterImpl(data.getTimeStampNanoTZVector(arrowType));
    timeStampNanoTZWriter.setPosition(idx());
    writers.add(timeStampNanoTZWriter);
  }
  return timeStampNanoTZWriter;
}

public TimeStampNanoTZWriter asTimeStampNanoTZ(ArrowType arrowType) {
  data.setType(idx(), MinorType.TIMESTAMPNANOTZ);
  return getTimeStampNanoTZWriter(arrowType);
}

@Override
public void write(TimeStampNanoTZHolder holder) {
  data.setType(idx(), MinorType.TIMESTAMPNANOTZ);
  ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPNANO.getType();
  ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), holder.timezone);
  getTimeStampNanoTZWriter(arrowType).setPosition(idx());
  getTimeStampNanoTZWriter(arrowType).write(holder);
}

public void writeTimeStampNanoTZ(long value) {
  data.setType(idx(), MinorType.TIMESTAMPNANOTZ);
  ArrowType.Timestamp arrowTypeWithoutTz = (ArrowType.Timestamp) MinorType.TIMESTAMPNANO.getType();
  ArrowType arrowType = new ArrowType.Timestamp(arrowTypeWithoutTz.getUnit(), "UTC");
  getTimeStampNanoTZWriter(arrowType).setPosition(idx());
  getTimeStampNanoTZWriter(arrowType).writeTimeStampNanoTZ(value);
}

private TimeMicroWriter timeMicroWriter;

private TimeMicroWriter getTimeMicroWriter() {
  if (timeMicroWriter == null) {
    timeMicroWriter = new TimeMicroWriterImpl(data.getTimeMicroVector());
    timeMicroWriter.setPosition(idx());
    writers.add(timeMicroWriter);
  }
  return timeMicroWriter;
}

public TimeMicroWriter asTimeMicro() {
  data.setType(idx(), MinorType.TIMEMICRO);
  return getTimeMicroWriter();
}

@Override
public void write(TimeMicroHolder holder) {
  data.setType(idx(), MinorType.TIMEMICRO);
  getTimeMicroWriter().setPosition(idx());
  getTimeMicroWriter().writeTimeMicro(holder.value);
}

public void writeTimeMicro(long value) {
  data.setType(idx(), MinorType.TIMEMICRO);
  getTimeMicroWriter().setPosition(idx());
  getTimeMicroWriter().writeTimeMicro(value);
}

private TimeNanoWriter timeNanoWriter;

private TimeNanoWriter getTimeNanoWriter() {
  if (timeNanoWriter == null) {
    timeNanoWriter = new TimeNanoWriterImpl(data.getTimeNanoVector());
    timeNanoWriter.setPosition(idx());
    writers.add(timeNanoWriter);
  }
  return timeNanoWriter;
}

public TimeNanoWriter asTimeNano() {
  data.setType(idx(), MinorType.TIMENANO);
  return getTimeNanoWriter();
}

@Override
public void write(TimeNanoHolder holder) {
  data.setType(idx(), MinorType.TIMENANO);
  getTimeNanoWriter().setPosition(idx());
  getTimeNanoWriter().writeTimeNano(holder.value);
}

public void writeTimeNano(long value) {
  data.setType(idx(), MinorType.TIMENANO);
  getTimeNanoWriter().setPosition(idx());
  getTimeNanoWriter().writeTimeNano(value);
}

private IntervalDayWriter intervalDayWriter;

private IntervalDayWriter getIntervalDayWriter() {
  if (intervalDayWriter == null) {
    intervalDayWriter = new IntervalDayWriterImpl(data.getIntervalDayVector());
    intervalDayWriter.setPosition(idx());
    writers.add(intervalDayWriter);
  }
  return intervalDayWriter;
}
+ public IntervalDayWriter asIntervalDay() { + data.setType(idx(), MinorType.INTERVALDAY); + return getIntervalDayWriter(); + } + + @Override + public void write(IntervalDayHolder holder) { + data.setType(idx(), MinorType.INTERVALDAY); + getIntervalDayWriter().setPosition(idx()); + getIntervalDayWriter().writeIntervalDay(holder.days, holder.milliseconds); + } + + public void writeIntervalDay(int days, int milliseconds) { + data.setType(idx(), MinorType.INTERVALDAY); + getIntervalDayWriter().setPosition(idx()); + getIntervalDayWriter().writeIntervalDay(days, milliseconds); + } + + private IntervalMonthDayNanoWriter intervalMonthDayNanoWriter; + + private IntervalMonthDayNanoWriter getIntervalMonthDayNanoWriter() { + if (intervalMonthDayNanoWriter == null) { + intervalMonthDayNanoWriter = new IntervalMonthDayNanoWriterImpl(data.getIntervalMonthDayNanoVector()); + intervalMonthDayNanoWriter.setPosition(idx()); + writers.add(intervalMonthDayNanoWriter); + } + return intervalMonthDayNanoWriter; + } + + public IntervalMonthDayNanoWriter asIntervalMonthDayNano() { + data.setType(idx(), MinorType.INTERVALMONTHDAYNANO); + return getIntervalMonthDayNanoWriter(); + } + + @Override + public void write(IntervalMonthDayNanoHolder holder) { + data.setType(idx(), MinorType.INTERVALMONTHDAYNANO); + getIntervalMonthDayNanoWriter().setPosition(idx()); + getIntervalMonthDayNanoWriter().writeIntervalMonthDayNano(holder.months, holder.days, holder.nanoseconds); + } + + public void writeIntervalMonthDayNano(int months, int days, long nanoseconds) { + data.setType(idx(), MinorType.INTERVALMONTHDAYNANO); + getIntervalMonthDayNanoWriter().setPosition(idx()); + getIntervalMonthDayNanoWriter().writeIntervalMonthDayNano(months, days, nanoseconds); + } + + private Decimal256Writer decimal256Writer; + + private Decimal256Writer getDecimal256Writer(ArrowType arrowType) { + if (decimal256Writer == null) { + decimal256Writer = new Decimal256WriterImpl(data.getDecimal256Vector(arrowType)); + 
decimal256Writer.setPosition(idx()); + writers.add(decimal256Writer); + } + return decimal256Writer; + } + + public Decimal256Writer asDecimal256(ArrowType arrowType) { + data.setType(idx(), MinorType.DECIMAL256); + return getDecimal256Writer(arrowType); + } + + @Override + public void write(Decimal256Holder holder) { + data.setType(idx(), MinorType.DECIMAL256); + ArrowType arrowType = new ArrowType.Decimal(holder.precision, holder.scale, Decimal256Holder.WIDTH * 8); + getDecimal256Writer(arrowType).setPosition(idx()); + getDecimal256Writer(arrowType).writeDecimal256(holder.start, holder.buffer, arrowType); + } + + public void writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType) { + data.setType(idx(), MinorType.DECIMAL256); + getDecimal256Writer(arrowType).setPosition(idx()); + getDecimal256Writer(arrowType).writeDecimal256(start, buffer, arrowType); + } + public void writeDecimal256(BigDecimal value) { + data.setType(idx(), MinorType.DECIMAL256); + ArrowType arrowType = new ArrowType.Decimal(value.precision(), value.scale(), Decimal256Vector.TYPE_WIDTH * 8); + getDecimal256Writer(arrowType).setPosition(idx()); + getDecimal256Writer(arrowType).writeDecimal256(value); + } + + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType) { + data.setType(idx(), MinorType.DECIMAL256); + getDecimal256Writer(arrowType).setPosition(idx()); + getDecimal256Writer(arrowType).writeBigEndianBytesToDecimal256(value, arrowType); + } + + private DecimalWriter decimalWriter; + + private DecimalWriter getDecimalWriter(ArrowType arrowType) { + if (decimalWriter == null) { + decimalWriter = new DecimalWriterImpl(data.getDecimalVector(arrowType)); + decimalWriter.setPosition(idx()); + writers.add(decimalWriter); + } + return decimalWriter; + } + + public DecimalWriter asDecimal(ArrowType arrowType) { + data.setType(idx(), MinorType.DECIMAL); + return getDecimalWriter(arrowType); + } + + @Override + public void write(DecimalHolder holder) { + 
data.setType(idx(), MinorType.DECIMAL); + ArrowType arrowType = new ArrowType.Decimal(holder.precision, holder.scale, DecimalHolder.WIDTH * 8); + getDecimalWriter(arrowType).setPosition(idx()); + getDecimalWriter(arrowType).writeDecimal(holder.start, holder.buffer, arrowType); + } + + public void writeDecimal(long start, ArrowBuf buffer, ArrowType arrowType) { + data.setType(idx(), MinorType.DECIMAL); + getDecimalWriter(arrowType).setPosition(idx()); + getDecimalWriter(arrowType).writeDecimal(start, buffer, arrowType); + } + public void writeDecimal(BigDecimal value) { + data.setType(idx(), MinorType.DECIMAL); + ArrowType arrowType = new ArrowType.Decimal(value.precision(), value.scale(), DecimalVector.TYPE_WIDTH * 8); + getDecimalWriter(arrowType).setPosition(idx()); + getDecimalWriter(arrowType).writeDecimal(value); + } + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType) { + data.setType(idx(), MinorType.DECIMAL); + getDecimalWriter(arrowType).setPosition(idx()); + getDecimalWriter(arrowType).writeBigEndianBytesToDecimal(value, arrowType); + } + + private FixedSizeBinaryWriter fixedSizeBinaryWriter; + + private FixedSizeBinaryWriter getFixedSizeBinaryWriter(ArrowType arrowType) { + if (fixedSizeBinaryWriter == null) { + fixedSizeBinaryWriter = new FixedSizeBinaryWriterImpl(data.getFixedSizeBinaryVector(arrowType)); + fixedSizeBinaryWriter.setPosition(idx()); + writers.add(fixedSizeBinaryWriter); + } + return fixedSizeBinaryWriter; + } + + public FixedSizeBinaryWriter asFixedSizeBinary(ArrowType arrowType) { + data.setType(idx(), MinorType.FIXEDSIZEBINARY); + return getFixedSizeBinaryWriter(arrowType); + } + + @Override + public void write(FixedSizeBinaryHolder holder) { + data.setType(idx(), MinorType.FIXEDSIZEBINARY); + ArrowType arrowType = new ArrowType.FixedSizeBinary(holder.byteWidth); + getFixedSizeBinaryWriter(arrowType).setPosition(idx()); + getFixedSizeBinaryWriter(arrowType).write(holder); + } + + public void 
writeFixedSizeBinary(ArrowBuf buffer) { + data.setType(idx(), MinorType.FIXEDSIZEBINARY); + // This is expected to throw. There's nothing more that we can do here since we can't infer any + // sort of default unit for the Duration or a default width for the FixedSizeBinary types. + ArrowType arrowType = MinorType.FIXEDSIZEBINARY.getType(); + getFixedSizeBinaryWriter(arrowType).setPosition(idx()); + getFixedSizeBinaryWriter(arrowType).writeFixedSizeBinary(buffer); + } + + private VarBinaryWriter varBinaryWriter; + + private VarBinaryWriter getVarBinaryWriter() { + if (varBinaryWriter == null) { + varBinaryWriter = new VarBinaryWriterImpl(data.getVarBinaryVector()); + varBinaryWriter.setPosition(idx()); + writers.add(varBinaryWriter); + } + return varBinaryWriter; + } + + public VarBinaryWriter asVarBinary() { + data.setType(idx(), MinorType.VARBINARY); + return getVarBinaryWriter(); + } + + @Override + public void write(VarBinaryHolder holder) { + data.setType(idx(), MinorType.VARBINARY); + getVarBinaryWriter().setPosition(idx()); + getVarBinaryWriter().writeVarBinary(holder.start, holder.end, holder.buffer); + } + + public void writeVarBinary(int start, int end, ArrowBuf buffer) { + data.setType(idx(), MinorType.VARBINARY); + getVarBinaryWriter().setPosition(idx()); + getVarBinaryWriter().writeVarBinary(start, end, buffer); + } + @Override + public void writeVarBinary(byte[] value) { + getVarBinaryWriter().setPosition(idx()); + getVarBinaryWriter().writeVarBinary(value); + } + + @Override + public void writeVarBinary(byte[] value, int offset, int length) { + getVarBinaryWriter().setPosition(idx()); + getVarBinaryWriter().writeVarBinary(value, offset, length); + } + + @Override + public void writeVarBinary(ByteBuffer value) { + getVarBinaryWriter().setPosition(idx()); + getVarBinaryWriter().writeVarBinary(value); + } + + @Override + public void writeVarBinary(ByteBuffer value, int offset, int length) { + getVarBinaryWriter().setPosition(idx()); + 
getVarBinaryWriter().writeVarBinary(value, offset, length); + } + + private VarCharWriter varCharWriter; + + private VarCharWriter getVarCharWriter() { + if (varCharWriter == null) { + varCharWriter = new VarCharWriterImpl(data.getVarCharVector()); + varCharWriter.setPosition(idx()); + writers.add(varCharWriter); + } + return varCharWriter; + } + + public VarCharWriter asVarChar() { + data.setType(idx(), MinorType.VARCHAR); + return getVarCharWriter(); + } + + @Override + public void write(VarCharHolder holder) { + data.setType(idx(), MinorType.VARCHAR); + getVarCharWriter().setPosition(idx()); + getVarCharWriter().writeVarChar(holder.start, holder.end, holder.buffer); + } + + public void writeVarChar(int start, int end, ArrowBuf buffer) { + data.setType(idx(), MinorType.VARCHAR); + getVarCharWriter().setPosition(idx()); + getVarCharWriter().writeVarChar(start, end, buffer); + } + @Override + public void writeVarChar(Text value) { + getVarCharWriter().setPosition(idx()); + getVarCharWriter().writeVarChar(value); + } + + @Override + public void writeVarChar(String value) { + getVarCharWriter().setPosition(idx()); + getVarCharWriter().writeVarChar(value); + } + + private LargeVarCharWriter largeVarCharWriter; + + private LargeVarCharWriter getLargeVarCharWriter() { + if (largeVarCharWriter == null) { + largeVarCharWriter = new LargeVarCharWriterImpl(data.getLargeVarCharVector()); + largeVarCharWriter.setPosition(idx()); + writers.add(largeVarCharWriter); + } + return largeVarCharWriter; + } + + public LargeVarCharWriter asLargeVarChar() { + data.setType(idx(), MinorType.LARGEVARCHAR); + return getLargeVarCharWriter(); + } + + @Override + public void write(LargeVarCharHolder holder) { + data.setType(idx(), MinorType.LARGEVARCHAR); + getLargeVarCharWriter().setPosition(idx()); + getLargeVarCharWriter().writeLargeVarChar(holder.start, holder.end, holder.buffer); + } + + public void writeLargeVarChar(long start, long end, ArrowBuf buffer) { + data.setType(idx(), 
MinorType.LARGEVARCHAR); + getLargeVarCharWriter().setPosition(idx()); + getLargeVarCharWriter().writeLargeVarChar(start, end, buffer); + } + @Override + public void writeLargeVarChar(Text value) { + getLargeVarCharWriter().setPosition(idx()); + getLargeVarCharWriter().writeLargeVarChar(value); + } + + @Override + public void writeLargeVarChar(String value) { + getLargeVarCharWriter().setPosition(idx()); + getLargeVarCharWriter().writeLargeVarChar(value); + } + + private LargeVarBinaryWriter largeVarBinaryWriter; + + private LargeVarBinaryWriter getLargeVarBinaryWriter() { + if (largeVarBinaryWriter == null) { + largeVarBinaryWriter = new LargeVarBinaryWriterImpl(data.getLargeVarBinaryVector()); + largeVarBinaryWriter.setPosition(idx()); + writers.add(largeVarBinaryWriter); + } + return largeVarBinaryWriter; + } + + public LargeVarBinaryWriter asLargeVarBinary() { + data.setType(idx(), MinorType.LARGEVARBINARY); + return getLargeVarBinaryWriter(); + } + + @Override + public void write(LargeVarBinaryHolder holder) { + data.setType(idx(), MinorType.LARGEVARBINARY); + getLargeVarBinaryWriter().setPosition(idx()); + getLargeVarBinaryWriter().writeLargeVarBinary(holder.start, holder.end, holder.buffer); + } + + public void writeLargeVarBinary(long start, long end, ArrowBuf buffer) { + data.setType(idx(), MinorType.LARGEVARBINARY); + getLargeVarBinaryWriter().setPosition(idx()); + getLargeVarBinaryWriter().writeLargeVarBinary(start, end, buffer); + } + @Override + public void writeLargeVarBinary(byte[] value) { + getLargeVarBinaryWriter().setPosition(idx()); + getLargeVarBinaryWriter().writeLargeVarBinary(value); + } + + @Override + public void writeLargeVarBinary(byte[] value, int offset, int length) { + getLargeVarBinaryWriter().setPosition(idx()); + getLargeVarBinaryWriter().writeLargeVarBinary(value, offset, length); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value) { + getLargeVarBinaryWriter().setPosition(idx()); + 
getLargeVarBinaryWriter().writeLargeVarBinary(value); + } + + @Override + public void writeLargeVarBinary(ByteBuffer value, int offset, int length) { + getLargeVarBinaryWriter().setPosition(idx()); + getLargeVarBinaryWriter().writeLargeVarBinary(value, offset, length); + } + + private BitWriter bitWriter; + + private BitWriter getBitWriter() { + if (bitWriter == null) { + bitWriter = new BitWriterImpl(data.getBitVector()); + bitWriter.setPosition(idx()); + writers.add(bitWriter); + } + return bitWriter; + } + + public BitWriter asBit() { + data.setType(idx(), MinorType.BIT); + return getBitWriter(); + } + + @Override + public void write(BitHolder holder) { + data.setType(idx(), MinorType.BIT); + getBitWriter().setPosition(idx()); + getBitWriter().writeBit(holder.value); + } + + public void writeBit(int value) { + data.setType(idx(), MinorType.BIT); + getBitWriter().setPosition(idx()); + getBitWriter().writeBit(value); + } + + public void writeNull() { + } + + @Override + public StructWriter struct() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().struct(); + } + + @Override + public ListWriter list() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().list(); + } + + @Override + public ListWriter list(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().list(name); + } + + @Override + public StructWriter struct(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().struct(name); + } + + @Override + public MapWriter map() { + data.setType(idx(), MinorType.MAP); + getListWriter().setPosition(idx()); + return getListWriter().map(); + } + + @Override + public MapWriter map(boolean keysSorted) { + data.setType(idx(), MinorType.MAP); + getListWriter().setPosition(idx()); + return getListWriter().map(keysSorted); + } + + @Override 
+ public MapWriter map(String name) { + data.setType(idx(), MinorType.MAP); + getStructWriter().setPosition(idx()); + return getStructWriter().map(name); + } + + @Override + public MapWriter map(String name, boolean keysSorted) { + data.setType(idx(), MinorType.MAP); + getStructWriter().setPosition(idx()); + return getStructWriter().map(name, keysSorted); + } + + @Override + public TinyIntWriter tinyInt(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().tinyInt(name); + } + + @Override + public TinyIntWriter tinyInt() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().tinyInt(); + } + @Override + public UInt1Writer uInt1(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().uInt1(name); + } + + @Override + public UInt1Writer uInt1() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().uInt1(); + } + @Override + public UInt2Writer uInt2(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().uInt2(name); + } + + @Override + public UInt2Writer uInt2() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().uInt2(); + } + @Override + public SmallIntWriter smallInt(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().smallInt(name); + } + + @Override + public SmallIntWriter smallInt() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().smallInt(); + } + @Override + public Float2Writer float2(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().float2(name); + } + + @Override + public Float2Writer float2() { + 
data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().float2(); + } + @Override + public IntWriter integer(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().integer(name); + } + + @Override + public IntWriter integer() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().integer(); + } + @Override + public UInt4Writer uInt4(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().uInt4(name); + } + + @Override + public UInt4Writer uInt4() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().uInt4(); + } + @Override + public Float4Writer float4(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().float4(name); + } + + @Override + public Float4Writer float4() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().float4(); + } + @Override + public DateDayWriter dateDay(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().dateDay(name); + } + + @Override + public DateDayWriter dateDay() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().dateDay(); + } + @Override + public IntervalYearWriter intervalYear(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().intervalYear(name); + } + + @Override + public IntervalYearWriter intervalYear() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().intervalYear(); + } + @Override + public TimeSecWriter timeSec(String name) { + data.setType(idx(), MinorType.STRUCT); + 
getStructWriter().setPosition(idx()); + return getStructWriter().timeSec(name); + } + + @Override + public TimeSecWriter timeSec() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().timeSec(); + } + @Override + public TimeMilliWriter timeMilli(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().timeMilli(name); + } + + @Override + public TimeMilliWriter timeMilli() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().timeMilli(); + } + @Override + public BigIntWriter bigInt(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().bigInt(name); + } + + @Override + public BigIntWriter bigInt() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().bigInt(); + } + @Override + public UInt8Writer uInt8(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().uInt8(name); + } + + @Override + public UInt8Writer uInt8() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().uInt8(); + } + @Override + public Float8Writer float8(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().float8(name); + } + + @Override + public Float8Writer float8() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().float8(); + } + @Override + public DateMilliWriter dateMilli(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().dateMilli(name); + } + + @Override + public DateMilliWriter dateMilli() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return 
getListWriter().dateMilli(); + } + @Override + public DurationWriter duration(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().duration(name); + } + + @Override + public DurationWriter duration() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().duration(); + } + @Override + public DurationWriter duration(String name, org.apache.arrow.vector.types.TimeUnit unit) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().duration(name, unit); + } + @Override + public TimeStampSecWriter timeStampSec(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().timeStampSec(name); + } + + @Override + public TimeStampSecWriter timeStampSec() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().timeStampSec(); + } + @Override + public TimeStampMilliWriter timeStampMilli(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().timeStampMilli(name); + } + + @Override + public TimeStampMilliWriter timeStampMilli() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().timeStampMilli(); + } + @Override + public TimeStampMicroWriter timeStampMicro(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().timeStampMicro(name); + } + + @Override + public TimeStampMicroWriter timeStampMicro() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().timeStampMicro(); + } + @Override + public TimeStampNanoWriter timeStampNano(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().timeStampNano(name); + } 
// Remaining child-writer accessors: unnamed variants go through the list writer
// (slot tagged LIST), named variants through the struct writer (slot tagged STRUCT).

@Override
public TimeStampNanoWriter timeStampNano() {
  data.setType(idx(), MinorType.LIST);
  getListWriter().setPosition(idx());
  return getListWriter().timeStampNano();
}

@Override
public TimeStampSecTZWriter timeStampSecTZ(String name) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
  return getStructWriter().timeStampSecTZ(name);
}

@Override
public TimeStampSecTZWriter timeStampSecTZ() {
  data.setType(idx(), MinorType.LIST);
  getListWriter().setPosition(idx());
  return getListWriter().timeStampSecTZ();
}

// Timezone-qualified named variant: the child's concrete type carries the timezone.
@Override
public TimeStampSecTZWriter timeStampSecTZ(String name, String timezone) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
  return getStructWriter().timeStampSecTZ(name, timezone);
}

@Override
public TimeStampMilliTZWriter timeStampMilliTZ(String name) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
  return getStructWriter().timeStampMilliTZ(name);
}

@Override
public TimeStampMilliTZWriter timeStampMilliTZ() {
  data.setType(idx(), MinorType.LIST);
  getListWriter().setPosition(idx());
  return getListWriter().timeStampMilliTZ();
}

@Override
public TimeStampMilliTZWriter timeStampMilliTZ(String name, String timezone) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
  return getStructWriter().timeStampMilliTZ(name, timezone);
}

@Override
public TimeStampMicroTZWriter timeStampMicroTZ(String name) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
  return getStructWriter().timeStampMicroTZ(name);
}

@Override
public TimeStampMicroTZWriter timeStampMicroTZ() {
  data.setType(idx(), MinorType.LIST);
  getListWriter().setPosition(idx());
  return getListWriter().timeStampMicroTZ();
}

@Override
public TimeStampMicroTZWriter timeStampMicroTZ(String name, String timezone) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
  return getStructWriter().timeStampMicroTZ(name, timezone);
}

@Override
public TimeStampNanoTZWriter timeStampNanoTZ(String name) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
  return getStructWriter().timeStampNanoTZ(name);
}

@Override
public TimeStampNanoTZWriter timeStampNanoTZ() {
  data.setType(idx(), MinorType.LIST);
  getListWriter().setPosition(idx());
  return getListWriter().timeStampNanoTZ();
}

@Override
public TimeStampNanoTZWriter timeStampNanoTZ(String name, String timezone) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
  return getStructWriter().timeStampNanoTZ(name, timezone);
}

@Override
public TimeMicroWriter timeMicro(String name) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
  return getStructWriter().timeMicro(name);
}

@Override
public TimeMicroWriter timeMicro() {
  data.setType(idx(), MinorType.LIST);
  getListWriter().setPosition(idx());
  return getListWriter().timeMicro();
}

@Override
public TimeNanoWriter timeNano(String name) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
  return getStructWriter().timeNano(name);
}

@Override
public TimeNanoWriter timeNano() {
  data.setType(idx(), MinorType.LIST);
  getListWriter().setPosition(idx());
  return getListWriter().timeNano();
}

@Override
public IntervalDayWriter intervalDay(String name) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
  return getStructWriter().intervalDay(name);
}

@Override
public IntervalDayWriter intervalDay() {
  data.setType(idx(), MinorType.LIST);
  getListWriter().setPosition(idx());
  return getListWriter().intervalDay();
}

@Override
public IntervalMonthDayNanoWriter intervalMonthDayNano(String name) {
  data.setType(idx(), MinorType.STRUCT);
  getStructWriter().setPosition(idx());
return getStructWriter().intervalMonthDayNano(name); + } + + @Override + public IntervalMonthDayNanoWriter intervalMonthDayNano() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().intervalMonthDayNano(); + } + @Override + public Decimal256Writer decimal256(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().decimal256(name); + } + + @Override + public Decimal256Writer decimal256() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().decimal256(); + } + @Override + public Decimal256Writer decimal256(String name, int scale, int precision) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().decimal256(name, scale, precision); + } + @Override + public DecimalWriter decimal(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().decimal(name); + } + + @Override + public DecimalWriter decimal() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().decimal(); + } + @Override + public DecimalWriter decimal(String name, int scale, int precision) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().decimal(name, scale, precision); + } + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().fixedSizeBinary(name); + } + + @Override + public FixedSizeBinaryWriter fixedSizeBinary() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().fixedSizeBinary(); + } + @Override + public FixedSizeBinaryWriter fixedSizeBinary(String name, int byteWidth) { + data.setType(idx(), MinorType.STRUCT); + 
getStructWriter().setPosition(idx()); + return getStructWriter().fixedSizeBinary(name, byteWidth); + } + @Override + public VarBinaryWriter varBinary(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().varBinary(name); + } + + @Override + public VarBinaryWriter varBinary() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().varBinary(); + } + @Override + public VarCharWriter varChar(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().varChar(name); + } + + @Override + public VarCharWriter varChar() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().varChar(); + } + @Override + public LargeVarCharWriter largeVarChar(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().largeVarChar(name); + } + + @Override + public LargeVarCharWriter largeVarChar() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().largeVarChar(); + } + @Override + public LargeVarBinaryWriter largeVarBinary(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().largeVarBinary(name); + } + + @Override + public LargeVarBinaryWriter largeVarBinary() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().largeVarBinary(); + } + @Override + public BitWriter bit(String name) { + data.setType(idx(), MinorType.STRUCT); + getStructWriter().setPosition(idx()); + return getStructWriter().bit(name); + } + + @Override + public BitWriter bit() { + data.setType(idx(), MinorType.LIST); + getListWriter().setPosition(idx()); + return getListWriter().bit(); + } + + @Override + public void allocate() { + data.allocateNew(); + } + + @Override 
+ public void clear() { + data.clear(); + } + + @Override + public void close() throws Exception { + data.close(); + } + + @Override + public Field getField() { + return data.getField(); + } + + @Override + public int getValueCapacity() { + return data.getValueCapacity(); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarBinaryHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarBinaryHolderReaderImpl.java new file mode 100644 index 000000000000..6e5f8ea982c9 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarBinaryHolderReaderImpl.java @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class VarBinaryHolderReaderImpl extends AbstractFieldReader { + + private VarBinaryHolder holder; + public VarBinaryHolderReaderImpl(VarBinaryHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw 
new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.VARBINARY; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(VarBinaryHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + } + + @Override + public void read(NullableVarBinaryHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + h.isSet = isSet() ? 1 : 0; + } + + // read friendly type + @Override + public byte[] readByteArray() { + + int length = holder.end - holder.start; + byte[] value = new byte [length]; + holder.buffer.getBytes(holder.start, value, 0, length); + return value; + } + + @Override + public Object readObject() { + return readByteArray(); + } + + public void copyAsValue(VarBinaryWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarBinaryReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarBinaryReaderImpl.java new file mode 100644 index 000000000000..5a437e5e19de --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarBinaryReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template 
ComplexReaders.java + */ +@SuppressWarnings("unused") +public class VarBinaryReaderImpl extends AbstractFieldReader { + + private final VarBinaryVector vector; + + public VarBinaryReaderImpl(VarBinaryVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return !vector.isNull(idx()); + } + + public void copyAsValue(VarBinaryWriter writer){ + VarBinaryWriterImpl impl = (VarBinaryWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + VarBinaryWriterImpl impl = (VarBinaryWriterImpl) writer.varBinary(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableVarBinaryHolder h){ + vector.get(idx(), h); + } + + public byte[] readByteArray(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarBinaryWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarBinaryWriterImpl.java new file mode 100644 index 000000000000..190940c6de8a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarBinaryWriterImpl.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the 
ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public class VarBinaryWriterImpl extends AbstractFieldWriter { + + final VarBinaryVector vector; + + +public VarBinaryWriterImpl(VarBinaryVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(VarBinaryHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableVarBinaryHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeVarBinary(int start, int end, ArrowBuf buffer) { + vector.setSafe(idx(), 1, start, end, buffer); + vector.setValueCount(idx()+1); + } + + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + + public void writeVarBinary(byte[] value) { + vector.setSafe(idx(), value); + vector.setValueCount(idx() + 1); + } + + public void writeVarBinary(byte[] value, int offset, int length) { + vector.setSafe(idx(), value, offset, length); + vector.setValueCount(idx() + 1); + } + + public void writeVarBinary(ByteBuffer value) { + vector.setSafe(idx(), value, 0, value.remaining()); + vector.setValueCount(idx() + 1); + } + + public void writeVarBinary(ByteBuffer value, int offset, int length) { + vector.setSafe(idx(), value, offset, length); + vector.setValueCount(idx() + 1); + } +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarCharHolderReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarCharHolderReaderImpl.java new file mode 100644 index 
000000000000..c8b9cf76ec67 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarCharHolderReaderImpl.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import 
org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +// Source code generated using FreeMarker template HolderReaderImpl.java + +@SuppressWarnings("unused") +public class VarCharHolderReaderImpl extends AbstractFieldReader { + + private VarCharHolder holder; + public VarCharHolderReaderImpl(VarCharHolder holder) { + this.holder = holder; + } + + @Override + public int size() { + throw new UnsupportedOperationException("You can't call size on a Holder value reader."); + } + + @Override + public boolean next() { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + + } + + @Override + public void setPosition(int index) { + throw new UnsupportedOperationException("You can't call next on a single value reader."); + } + + @Override + public MinorType getMinorType() { + return MinorType.VARCHAR; + } + + @Override + public boolean isSet() { + return true; + } + + @Override + public void read(VarCharHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + } + + @Override + public void read(NullableVarCharHolder h) { + h.start = holder.start; + h.end = holder.end; + h.buffer = holder.buffer; + h.isSet = isSet() ? 
1 : 0; + } + + // read friendly type + @Override + public Text readText() { + + int length = holder.end - holder.start; + byte[] value = new byte [length]; + holder.buffer.getBytes(holder.start, value, 0, length); + Text text = new Text(); + text.set(value); + return text; + } + + @Override + public Object readObject() { + return readText(); + } + + public void copyAsValue(VarCharWriter writer){ + writer.write(holder); + } +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarCharReaderImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarCharReaderImpl.java new file mode 100644 index 000000000000..43111dc5662f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarCharReaderImpl.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public class VarCharReaderImpl extends AbstractFieldReader { + + private final VarCharVector vector; + + public VarCharReaderImpl(VarCharVector vector){ + super(); + this.vector = vector; + } + + public MinorType getMinorType(){ + return vector.getMinorType(); + } + + public Field getField(){ + return vector.getField(); + } + + public boolean isSet(){ + return 
!vector.isNull(idx()); + } + + public void copyAsValue(VarCharWriter writer){ + VarCharWriterImpl impl = (VarCharWriterImpl) writer; + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + public void copyAsField(String name, StructWriter writer){ + VarCharWriterImpl impl = (VarCharWriterImpl) writer.varChar(name); + impl.vector.copyFromSafe(idx(), impl.idx(), vector); + } + + + public void read(NullableVarCharHolder h){ + vector.get(idx(), h); + } + + public Text readText(){ + return vector.getObject(idx()); + } + + + public void copyValue(FieldWriter w){ + + } + + public Object readObject(){ + return (Object)vector.getObject(idx()); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarCharWriterImpl.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarCharWriterImpl.java new file mode 100644 index 000000000000..9c1c5d439220 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/impl/VarCharWriterImpl.java @@ -0,0 +1,144 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.impl; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public class VarCharWriterImpl extends AbstractFieldWriter { + + final VarCharVector vector; + + private final Text textBuffer = new Text(); + +public VarCharWriterImpl(VarCharVector vector) { + this.vector = vector; + } + + @Override + public Field getField() { + return vector.getField(); + } + + @Override + public int getValueCapacity() { + return vector.getValueCapacity(); + } + + @Override + public void allocate() { + vector.allocateNew(); + } + + @Override + public void close() { + vector.close(); + } + + @Override + public void clear() { + vector.clear(); + } + + @Override + protected int idx() { + return super.idx(); + } + + + public void write(VarCharHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void write(NullableVarCharHolder h) { + vector.setSafe(idx(), h); + vector.setValueCount(idx()+1); + } + + public void writeVarChar(int start, int end, ArrowBuf buffer) { + vector.setSafe(idx(), 1, start, end, buffer); + vector.setValueCount(idx()+1); + } + + @Override + public void writeVarChar(Text value) { + vector.setSafe(idx(), value); + vector.setValueCount(idx()+1); + } + + @Override + public void writeVarChar(String value) { + textBuffer.set(value); + vector.setSafe(idx(), textBuffer); + vector.setValueCount(idx()+1); + } + + + + public void writeNull() { + vector.setNull(idx()); + vector.setValueCount(idx()+1); + } + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/BaseReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/BaseReader.java new file mode 100644 index 000000000000..26a9e482fa72 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/BaseReader.java @@ -0,0 +1,121 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import 
java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template BaseReader.java + */ +@SuppressWarnings("unused") +public interface BaseReader extends Positionable{ + Field getField(); + MinorType getMinorType(); + void reset(); + void read(UnionHolder holder); + void read(int index, UnionHolder holder); + void copyAsValue(UnionWriter writer); + void read(DenseUnionHolder holder); + void read(int index, DenseUnionHolder holder); + void copyAsValue(DenseUnionWriter writer); + boolean isSet(); + + public interface StructReader extends BaseReader, Iterable{ + FieldReader reader(String name); + } + + public interface RepeatedStructReader extends StructReader{ + boolean next(); + int size(); + void copyAsValue(StructWriter writer); + } + + public interface ListReader extends BaseReader{ + FieldReader reader(); + } + + public interface RepeatedListReader extends ListReader{ + boolean next(); + int size(); + void copyAsValue(ListWriter writer); + } + + public interface MapReader extends BaseReader{ + FieldReader reader(); + } + + public interface RepeatedMapReader extends MapReader{ + boolean next(); + int size(); + void copyAsValue(MapWriter writer); + } + + public interface ScalarReader extends + TinyIntReader, UInt1Reader, UInt2Reader, SmallIntReader, Float2Reader, IntReader, UInt4Reader, Float4Reader, DateDayReader, IntervalYearReader, TimeSecReader, TimeMilliReader, BigIntReader, UInt8Reader, Float8Reader, DateMilliReader, DurationReader, TimeStampSecReader, TimeStampMilliReader, TimeStampMicroReader, TimeStampNanoReader, TimeStampSecTZReader, TimeStampMilliTZReader, TimeStampMicroTZReader, TimeStampNanoTZReader, TimeMicroReader, TimeNanoReader, IntervalDayReader, IntervalMonthDayNanoReader, Decimal256Reader, DecimalReader, 
FixedSizeBinaryReader, VarBinaryReader, VarCharReader, LargeVarCharReader, LargeVarBinaryReader, BitReader, + BaseReader {} + + interface ComplexReader{ + StructReader rootAsStruct(); + ListReader rootAsList(); + boolean rootIsStruct(); + boolean ok(); + } +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/BigIntReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/BigIntReader.java new file mode 100644 index 000000000000..8f3bc6e7433b --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/BigIntReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface BigIntReader extends BaseReader{ + + public void read(BigIntHolder h); + public void read(NullableBigIntHolder h); + public Object readObject(); + // read friendly type + public Long readLong(); + public boolean isSet(); + public void copyAsValue(BigIntWriter writer); + public void copyAsField(String name, BigIntWriter writer); + +} + + + + + diff 
--git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/BitReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/BitReader.java new file mode 100644 index 000000000000..5faecf35a585 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/BitReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface BitReader extends BaseReader{ + + public void read(BitHolder h); + public void read(NullableBitHolder h); + public Object readObject(); + // read friendly type + public Boolean readBoolean(); + public boolean isSet(); + public void copyAsValue(BitWriter writer); + public void copyAsField(String name, BitWriter writer); + +} + + + + + diff --git 
a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DateDayReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DateDayReader.java new file mode 100644 index 000000000000..783d496ad662 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DateDayReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface DateDayReader extends BaseReader{ + + public void read(DateDayHolder h); + public void read(NullableDateDayHolder h); + public Object readObject(); + // read friendly type + public Integer readInteger(); + public boolean isSet(); + public void copyAsValue(DateDayWriter writer); + public void copyAsField(String name, DateDayWriter writer); + +} + + 
+ + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DateMilliReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DateMilliReader.java new file mode 100644 index 000000000000..3b7ec16f42aa --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DateMilliReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface DateMilliReader extends BaseReader{ + + public void read(DateMilliHolder h); + public void read(NullableDateMilliHolder h); + public Object readObject(); + // read friendly type + public LocalDateTime readLocalDateTime(); + public boolean isSet(); + public void copyAsValue(DateMilliWriter writer); + public void copyAsField(String name, 
DateMilliWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Decimal256Reader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Decimal256Reader.java new file mode 100644 index 000000000000..733b92006600 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Decimal256Reader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface Decimal256Reader extends BaseReader{ + + public void read(Decimal256Holder h); + public void read(NullableDecimal256Holder h); + public Object readObject(); + // read friendly type + public BigDecimal readBigDecimal(); + public boolean isSet(); + public void copyAsValue(Decimal256Writer writer); + public void copyAsField(String name, 
Decimal256Writer writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DecimalReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DecimalReader.java new file mode 100644 index 000000000000..46a96b4babd0 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DecimalReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface DecimalReader extends BaseReader{ + + public void read(DecimalHolder h); + public void read(NullableDecimalHolder h); + public Object readObject(); + // read friendly type + public BigDecimal readBigDecimal(); + public boolean isSet(); + public void copyAsValue(DecimalWriter writer); + public void copyAsField(String name, DecimalWriter writer); + 
+} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DurationReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DurationReader.java new file mode 100644 index 000000000000..2873ab267d08 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/DurationReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface DurationReader extends BaseReader{ + + public void read(DurationHolder h); + public void read(NullableDurationHolder h); + public Object readObject(); + // read friendly type + public Duration readDuration(); + public boolean isSet(); + public void copyAsValue(DurationWriter writer); + public void copyAsField(String name, DurationWriter writer); + 
+} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/FixedSizeBinaryReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/FixedSizeBinaryReader.java new file mode 100644 index 000000000000..abb68f90d067 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/FixedSizeBinaryReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface FixedSizeBinaryReader extends BaseReader{ + + public void read(FixedSizeBinaryHolder h); + public void read(NullableFixedSizeBinaryHolder h); + public Object readObject(); + // read friendly type + public byte[] readByteArray(); + public boolean isSet(); + public void copyAsValue(FixedSizeBinaryWriter writer); + public void copyAsField(String name, 
FixedSizeBinaryWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Float2Reader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Float2Reader.java new file mode 100644 index 000000000000..caa8cc74d26a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Float2Reader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface Float2Reader extends BaseReader{ + + public void read(Float2Holder h); + public void read(NullableFloat2Holder h); + public Object readObject(); + // read friendly type + public Short readShort(); + public boolean isSet(); + public void copyAsValue(Float2Writer writer); + public void copyAsField(String name, Float2Writer writer); + +} + + + + + 
diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Float4Reader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Float4Reader.java new file mode 100644 index 000000000000..52de34ae159a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Float4Reader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface Float4Reader extends BaseReader{ + + public void read(Float4Holder h); + public void read(NullableFloat4Holder h); + public Object readObject(); + // read friendly type + public Float readFloat(); + public boolean isSet(); + public void copyAsValue(Float4Writer writer); + public void copyAsField(String name, Float4Writer writer); + +} + + + + + 
diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Float8Reader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Float8Reader.java new file mode 100644 index 000000000000..1f0c13e35d24 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/Float8Reader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface Float8Reader extends BaseReader{ + + public void read(Float8Holder h); + public void read(NullableFloat8Holder h); + public Object readObject(); + // read friendly type + public Double readDouble(); + public boolean isSet(); + public void copyAsValue(Float8Writer writer); + public void copyAsField(String name, Float8Writer writer); + +} + + + + + 
diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntReader.java new file mode 100644 index 000000000000..75c47f614dc8 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface IntReader extends BaseReader{ + + public void read(IntHolder h); + public void read(NullableIntHolder h); + public Object readObject(); + // read friendly type + public Integer readInteger(); + public boolean isSet(); + public void copyAsValue(IntWriter writer); + public void copyAsField(String name, IntWriter writer); + +} + + + + + diff --git 
a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntervalDayReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntervalDayReader.java new file mode 100644 index 000000000000..9accf51f8a94 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntervalDayReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface IntervalDayReader extends BaseReader{ + + public void read(IntervalDayHolder h); + public void read(NullableIntervalDayHolder h); + public Object readObject(); + // read friendly type + public Duration readDuration(); + public boolean isSet(); + public void copyAsValue(IntervalDayWriter writer); + public void copyAsField(String name, 
IntervalDayWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntervalMonthDayNanoReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntervalMonthDayNanoReader.java new file mode 100644 index 000000000000..ebb78ad6c301 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntervalMonthDayNanoReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface IntervalMonthDayNanoReader extends BaseReader{ + + public void read(IntervalMonthDayNanoHolder h); + public void read(NullableIntervalMonthDayNanoHolder h); + public Object readObject(); + // read friendly type + public PeriodDuration readPeriodDuration(); + public boolean isSet(); + public void copyAsValue(IntervalMonthDayNanoWriter writer); + 
public void copyAsField(String name, IntervalMonthDayNanoWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntervalYearReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntervalYearReader.java new file mode 100644 index 000000000000..fe7df0a524f2 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/IntervalYearReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface IntervalYearReader extends BaseReader{ + + public void read(IntervalYearHolder h); + public void read(NullableIntervalYearHolder h); + public Object readObject(); + // read friendly type + public Period readPeriod(); + public boolean isSet(); + public void copyAsValue(IntervalYearWriter writer); + public void copyAsField(String name, 
IntervalYearWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/LargeVarBinaryReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/LargeVarBinaryReader.java new file mode 100644 index 000000000000..5cb65533591d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/LargeVarBinaryReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface LargeVarBinaryReader extends BaseReader{ + + public void read(LargeVarBinaryHolder h); + public void read(NullableLargeVarBinaryHolder h); + public Object readObject(); + // read friendly type + public byte[] readByteArray(); + public boolean isSet(); + public void copyAsValue(LargeVarBinaryWriter writer); + public void copyAsField(String name, 
LargeVarBinaryWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/LargeVarCharReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/LargeVarCharReader.java new file mode 100644 index 000000000000..acecbd333b07 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/LargeVarCharReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface LargeVarCharReader extends BaseReader{ + + public void read(LargeVarCharHolder h); + public void read(NullableLargeVarCharHolder h); + public Object readObject(); + // read friendly type + public Text readText(); + public boolean isSet(); + public void copyAsValue(LargeVarCharWriter writer); + public void copyAsField(String name, LargeVarCharWriter 
writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/SmallIntReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/SmallIntReader.java new file mode 100644 index 000000000000..316f9552989e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/SmallIntReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface SmallIntReader extends BaseReader{ + + public void read(SmallIntHolder h); + public void read(NullableSmallIntHolder h); + public Object readObject(); + // read friendly type + public Short readShort(); + public boolean isSet(); + public void copyAsValue(SmallIntWriter writer); + public void copyAsField(String name, SmallIntWriter writer); + +} + + 
+ + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeMicroReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeMicroReader.java new file mode 100644 index 000000000000..6607c5bb9ef0 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeMicroReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeMicroReader extends BaseReader{ + + public void read(TimeMicroHolder h); + public void read(NullableTimeMicroHolder h); + public Object readObject(); + // read friendly type + public Long readLong(); + public boolean isSet(); + public void copyAsValue(TimeMicroWriter writer); + public void copyAsField(String name, TimeMicroWriter writer); + +} 
+ + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeMilliReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeMilliReader.java new file mode 100644 index 000000000000..b9c93a38c126 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeMilliReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeMilliReader extends BaseReader{ + + public void read(TimeMilliHolder h); + public void read(NullableTimeMilliHolder h); + public Object readObject(); + // read friendly type + public LocalDateTime readLocalDateTime(); + public boolean isSet(); + public void copyAsValue(TimeMilliWriter writer); + public void copyAsField(String name, 
TimeMilliWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeNanoReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeNanoReader.java new file mode 100644 index 000000000000..ab62ed0990e9 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeNanoReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeNanoReader extends BaseReader{ + + public void read(TimeNanoHolder h); + public void read(NullableTimeNanoHolder h); + public Object readObject(); + // read friendly type + public Long readLong(); + public boolean isSet(); + public void copyAsValue(TimeNanoWriter writer); + public void copyAsField(String name, TimeNanoWriter writer); + +} + + + 
+ + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeSecReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeSecReader.java new file mode 100644 index 000000000000..f8a710bc542e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeSecReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeSecReader extends BaseReader{ + + public void read(TimeSecHolder h); + public void read(NullableTimeSecHolder h); + public Object readObject(); + // read friendly type + public Integer readInteger(); + public boolean isSet(); + public void copyAsValue(TimeSecWriter writer); + public void copyAsField(String name, TimeSecWriter writer); + +} + + 
+ + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMicroReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMicroReader.java new file mode 100644 index 000000000000..ebafd3d189dd --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMicroReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeStampMicroReader extends BaseReader{ + + public void read(TimeStampMicroHolder h); + public void read(NullableTimeStampMicroHolder h); + public Object readObject(); + // read friendly type + public LocalDateTime readLocalDateTime(); + public boolean isSet(); + public void copyAsValue(TimeStampMicroWriter writer); + public void 
copyAsField(String name, TimeStampMicroWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMicroTZReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMicroTZReader.java new file mode 100644 index 000000000000..dfdf44f63d86 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMicroTZReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeStampMicroTZReader extends BaseReader{ + + public void read(TimeStampMicroTZHolder h); + public void read(NullableTimeStampMicroTZHolder h); + public Object readObject(); + // read friendly type + public Long readLong(); + public boolean isSet(); + public void copyAsValue(TimeStampMicroTZWriter writer); + public void copyAsField(String name, 
TimeStampMicroTZWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMilliReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMilliReader.java new file mode 100644 index 000000000000..f796aa69e728 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMilliReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeStampMilliReader extends BaseReader{ + + public void read(TimeStampMilliHolder h); + public void read(NullableTimeStampMilliHolder h); + public Object readObject(); + // read friendly type + public LocalDateTime readLocalDateTime(); + public boolean isSet(); + public void copyAsValue(TimeStampMilliWriter writer); + public void 
copyAsField(String name, TimeStampMilliWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMilliTZReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMilliTZReader.java new file mode 100644 index 000000000000..966cded48288 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampMilliTZReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeStampMilliTZReader extends BaseReader{ + + public void read(TimeStampMilliTZHolder h); + public void read(NullableTimeStampMilliTZHolder h); + public Object readObject(); + // read friendly type + public Long readLong(); + public boolean isSet(); + public void copyAsValue(TimeStampMilliTZWriter writer); + public void copyAsField(String name, 
TimeStampMilliTZWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampNanoReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampNanoReader.java new file mode 100644 index 000000000000..f48e299ed367 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampNanoReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeStampNanoReader extends BaseReader{ + + public void read(TimeStampNanoHolder h); + public void read(NullableTimeStampNanoHolder h); + public Object readObject(); + // read friendly type + public LocalDateTime readLocalDateTime(); + public boolean isSet(); + public void copyAsValue(TimeStampNanoWriter writer); + public void copyAsField(String 
name, TimeStampNanoWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampNanoTZReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampNanoTZReader.java new file mode 100644 index 000000000000..17bf4430d9ae --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampNanoTZReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeStampNanoTZReader extends BaseReader{ + + public void read(TimeStampNanoTZHolder h); + public void read(NullableTimeStampNanoTZHolder h); + public Object readObject(); + // read friendly type + public Long readLong(); + public boolean isSet(); + public void copyAsValue(TimeStampNanoTZWriter writer); + public void copyAsField(String name, 
TimeStampNanoTZWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampSecReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampSecReader.java new file mode 100644 index 000000000000..ad1cb92b1faf --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampSecReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeStampSecReader extends BaseReader{ + + public void read(TimeStampSecHolder h); + public void read(NullableTimeStampSecHolder h); + public Object readObject(); + // read friendly type + public LocalDateTime readLocalDateTime(); + public boolean isSet(); + public void copyAsValue(TimeStampSecWriter writer); + public void copyAsField(String name, 
TimeStampSecWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampSecTZReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampSecTZReader.java new file mode 100644 index 000000000000..930ba1cb8e6f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TimeStampSecTZReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TimeStampSecTZReader extends BaseReader{ + + public void read(TimeStampSecTZHolder h); + public void read(NullableTimeStampSecTZHolder h); + public Object readObject(); + // read friendly type + public Long readLong(); + public boolean isSet(); + public void copyAsValue(TimeStampSecTZWriter writer); + public void copyAsField(String name, 
TimeStampSecTZWriter writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TinyIntReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TinyIntReader.java new file mode 100644 index 000000000000..8c366fb8657c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/TinyIntReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface TinyIntReader extends BaseReader{ + + public void read(TinyIntHolder h); + public void read(NullableTinyIntHolder h); + public Object readObject(); + // read friendly type + public Byte readByte(); + public boolean isSet(); + public void copyAsValue(TinyIntWriter writer); + public void copyAsField(String name, TinyIntWriter writer); + +} + + + + + 
diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt1Reader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt1Reader.java new file mode 100644 index 000000000000..269e46699968 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt1Reader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface UInt1Reader extends BaseReader{ + + public void read(UInt1Holder h); + public void read(NullableUInt1Holder h); + public Object readObject(); + // read friendly type + public Byte readByte(); + public boolean isSet(); + public void copyAsValue(UInt1Writer writer); + public void copyAsField(String name, UInt1Writer writer); + +} + + + + + diff --git 
a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt2Reader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt2Reader.java new file mode 100644 index 000000000000..619a832de59a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt2Reader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface UInt2Reader extends BaseReader{ + + public void read(UInt2Holder h); + public void read(NullableUInt2Holder h); + public Object readObject(); + // read friendly type + public Character readCharacter(); + public boolean isSet(); + public void copyAsValue(UInt2Writer writer); + public void copyAsField(String name, UInt2Writer writer); + +} + + + + + 
diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt4Reader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt4Reader.java new file mode 100644 index 000000000000..4a1e88ec6d32 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt4Reader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface UInt4Reader extends BaseReader{ + + public void read(UInt4Holder h); + public void read(NullableUInt4Holder h); + public Object readObject(); + // read friendly type + public Integer readInteger(); + public boolean isSet(); + public void copyAsValue(UInt4Writer writer); + public void copyAsField(String name, UInt4Writer writer); + +} + + + + + diff 
--git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt8Reader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt8Reader.java new file mode 100644 index 000000000000..f07f3a1fd488 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/UInt8Reader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface UInt8Reader extends BaseReader{ + + public void read(UInt8Holder h); + public void read(NullableUInt8Holder h); + public Object readObject(); + // read friendly type + public Long readLong(); + public boolean isSet(); + public void copyAsValue(UInt8Writer writer); + public void copyAsField(String name, UInt8Writer writer); + +} + + + + + diff --git 
a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/VarBinaryReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/VarBinaryReader.java new file mode 100644 index 000000000000..d9fee511178f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/VarBinaryReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface VarBinaryReader extends BaseReader{ + + public void read(VarBinaryHolder h); + public void read(NullableVarBinaryHolder h); + public Object readObject(); + // read friendly type + public byte[] readByteArray(); + public boolean isSet(); + public void copyAsValue(VarBinaryWriter writer); + public void copyAsField(String name, VarBinaryWriter 
writer); + +} + + + + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/VarCharReader.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/VarCharReader.java new file mode 100644 index 000000000000..64d809da2d1e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/reader/VarCharReader.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.reader; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/** + * Source code generated using FreeMarker template ComplexReaders.java + */ +@SuppressWarnings("unused") +public interface VarCharReader extends BaseReader{ + + public void read(VarCharHolder h); + public void read(NullableVarCharHolder h); + public Object readObject(); + // read friendly type + public Text readText(); + public boolean isSet(); + public void copyAsValue(VarCharWriter writer); + public void copyAsField(String name, VarCharWriter writer); + +} + + + + + 
diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/BaseWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/BaseWriter.java new file mode 100644 index 000000000000..efab0a2a6d67 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/BaseWriter.java @@ -0,0 +1,237 @@ + + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/* + * File generated from BaseWriter.java using FreeMarker. + */ +@SuppressWarnings("unused") +public interface BaseWriter extends AutoCloseable, Positionable { + int getValueCapacity(); + void writeNull(); + + public interface StructWriter extends BaseWriter { + + Field getField(); + + /** + * Whether this writer is a struct writer and is empty (has no children). + * + *

+ * Intended only for use in determining whether to add dummy vector to + * avoid empty (zero-column) schema, as in JsonReader. + *

+ * @return whether the struct is empty + */ + boolean isEmptyStruct(); + + TinyIntWriter tinyInt(String name); + UInt1Writer uInt1(String name); + UInt2Writer uInt2(String name); + SmallIntWriter smallInt(String name); + Float2Writer float2(String name); + IntWriter integer(String name); + UInt4Writer uInt4(String name); + Float4Writer float4(String name); + DateDayWriter dateDay(String name); + IntervalYearWriter intervalYear(String name); + TimeSecWriter timeSec(String name); + TimeMilliWriter timeMilli(String name); + BigIntWriter bigInt(String name); + UInt8Writer uInt8(String name); + Float8Writer float8(String name); + DateMilliWriter dateMilli(String name); + DurationWriter duration(String name, org.apache.arrow.vector.types.TimeUnit unit); + DurationWriter duration(String name); + TimeStampSecWriter timeStampSec(String name); + TimeStampMilliWriter timeStampMilli(String name); + TimeStampMicroWriter timeStampMicro(String name); + TimeStampNanoWriter timeStampNano(String name); + TimeStampSecTZWriter timeStampSecTZ(String name, String timezone); + TimeStampSecTZWriter timeStampSecTZ(String name); + TimeStampMilliTZWriter timeStampMilliTZ(String name, String timezone); + TimeStampMilliTZWriter timeStampMilliTZ(String name); + TimeStampMicroTZWriter timeStampMicroTZ(String name, String timezone); + TimeStampMicroTZWriter timeStampMicroTZ(String name); + TimeStampNanoTZWriter timeStampNanoTZ(String name, String timezone); + TimeStampNanoTZWriter timeStampNanoTZ(String name); + TimeMicroWriter timeMicro(String name); + TimeNanoWriter timeNano(String name); + IntervalDayWriter intervalDay(String name); + IntervalMonthDayNanoWriter intervalMonthDayNano(String name); + Decimal256Writer decimal256(String name, int scale, int precision); + Decimal256Writer decimal256(String name); + DecimalWriter decimal(String name, int scale, int precision); + DecimalWriter decimal(String name); + FixedSizeBinaryWriter fixedSizeBinary(String name, int byteWidth); + 
FixedSizeBinaryWriter fixedSizeBinary(String name); + VarBinaryWriter varBinary(String name); + VarCharWriter varChar(String name); + LargeVarCharWriter largeVarChar(String name); + LargeVarBinaryWriter largeVarBinary(String name); + BitWriter bit(String name); + + void copyReaderToField(String name, FieldReader reader); + StructWriter struct(String name); + ListWriter list(String name); + MapWriter map(String name); + MapWriter map(String name, boolean keysSorted); + void start(); + void end(); + } + + public interface ListWriter extends BaseWriter { + void startList(); + void endList(); + StructWriter struct(); + ListWriter list(); + MapWriter map(); + MapWriter map(boolean keysSorted); + void copyReader(FieldReader reader); + + TinyIntWriter tinyInt(); + UInt1Writer uInt1(); + UInt2Writer uInt2(); + SmallIntWriter smallInt(); + Float2Writer float2(); + IntWriter integer(); + UInt4Writer uInt4(); + Float4Writer float4(); + DateDayWriter dateDay(); + IntervalYearWriter intervalYear(); + TimeSecWriter timeSec(); + TimeMilliWriter timeMilli(); + BigIntWriter bigInt(); + UInt8Writer uInt8(); + Float8Writer float8(); + DateMilliWriter dateMilli(); + DurationWriter duration(); + TimeStampSecWriter timeStampSec(); + TimeStampMilliWriter timeStampMilli(); + TimeStampMicroWriter timeStampMicro(); + TimeStampNanoWriter timeStampNano(); + TimeStampSecTZWriter timeStampSecTZ(); + TimeStampMilliTZWriter timeStampMilliTZ(); + TimeStampMicroTZWriter timeStampMicroTZ(); + TimeStampNanoTZWriter timeStampNanoTZ(); + TimeMicroWriter timeMicro(); + TimeNanoWriter timeNano(); + IntervalDayWriter intervalDay(); + IntervalMonthDayNanoWriter intervalMonthDayNano(); + Decimal256Writer decimal256(); + DecimalWriter decimal(); + FixedSizeBinaryWriter fixedSizeBinary(); + VarBinaryWriter varBinary(); + VarCharWriter varChar(); + LargeVarCharWriter largeVarChar(); + LargeVarBinaryWriter largeVarBinary(); + BitWriter bit(); + } + + public interface MapWriter extends ListWriter { + void 
startMap(); + void endMap(); + + void startEntry(); + void endEntry(); + + MapWriter key(); + MapWriter value(); + } + + public interface ScalarWriter extends + TinyIntWriter, UInt1Writer, UInt2Writer, SmallIntWriter, Float2Writer, IntWriter, UInt4Writer, Float4Writer, DateDayWriter, IntervalYearWriter, TimeSecWriter, TimeMilliWriter, BigIntWriter, UInt8Writer, Float8Writer, DateMilliWriter, DurationWriter, TimeStampSecWriter, TimeStampMilliWriter, TimeStampMicroWriter, TimeStampNanoWriter, TimeStampSecTZWriter, TimeStampMilliTZWriter, TimeStampMicroTZWriter, TimeStampNanoTZWriter, TimeMicroWriter, TimeNanoWriter, IntervalDayWriter, IntervalMonthDayNanoWriter, Decimal256Writer, DecimalWriter, FixedSizeBinaryWriter, VarBinaryWriter, VarCharWriter, LargeVarCharWriter, LargeVarBinaryWriter, BitWriter, BaseWriter {} + + public interface ComplexWriter { + void allocate(); + void clear(); + void copyReader(FieldReader reader); + StructWriter rootAsStruct(); + ListWriter rootAsList(); + MapWriter rootAsMap(boolean keysSorted); + + void setPosition(int index); + void setValueCount(int count); + void reset(); + } + + public interface StructOrListWriter { + void start(); + void end(); + StructOrListWriter struct(String name); + /** + * @deprecated use {@link #listOfStruct()} instead. 
+ */ + StructOrListWriter listoftstruct(String name); + StructOrListWriter listOfStruct(String name); + StructOrListWriter list(String name); + boolean isStructWriter(); + boolean isListWriter(); + VarCharWriter varChar(String name); + IntWriter integer(String name); + BigIntWriter bigInt(String name); + Float4Writer float4(String name); + Float8Writer float8(String name); + BitWriter bit(String name); + VarBinaryWriter binary(String name); + } +} diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/BigIntWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/BigIntWriter.java new file mode 100644 index 000000000000..b748ca2442fb --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/BigIntWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface BigIntWriter extends BaseWriter { + public void write(BigIntHolder h); + + public void writeBigInt(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/BitWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/BitWriter.java new file mode 100644 index 000000000000..cb61b29c1073 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/BitWriter.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface BitWriter extends BaseWriter { + public void write(BitHolder h); + + public void writeBit(int value); + + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DateDayWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DateDayWriter.java new file mode 100644 index 000000000000..75081af6f3cc --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DateDayWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface DateDayWriter extends BaseWriter { + public void write(DateDayHolder h); + + public void writeDateDay(int value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DateMilliWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DateMilliWriter.java new file mode 100644 index 000000000000..bc5de12e256b --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DateMilliWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface DateMilliWriter extends BaseWriter { + public void write(DateMilliHolder h); + + public void writeDateMilli(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Decimal256Writer.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Decimal256Writer.java new file mode 100644 index 000000000000..444fc93e2791 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Decimal256Writer.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public interface Decimal256Writer extends BaseWriter { + public void write(Decimal256Holder h); + + /** + * @deprecated + * The holder version should be used instead because the plain value version does not contain enough information + * to fully specify this field type. 
+ * @see #write(Decimal256Holder) + */ + @Deprecated + public void writeDecimal256(long start, ArrowBuf buffer); + + public void writeDecimal256(long start, ArrowBuf buffer, ArrowType arrowType); + + public void writeDecimal256(BigDecimal value); + + public void writeBigEndianBytesToDecimal256(byte[] value, ArrowType arrowType); + + /** + * @deprecated + * Use either the version that additionally takes in an ArrowType or use the holder version. + * This version does not contain enough information to fully specify this field type. + * @see #writeBigEndianBytesToDecimal256(byte[], ArrowType) + * @see #write(Decimal256Holder) + */ + @Deprecated + public void writeBigEndianBytesToDecimal256(byte[] value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DecimalWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DecimalWriter.java new file mode 100644 index 000000000000..f9415034767f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DecimalWriter.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public interface DecimalWriter extends BaseWriter { + public void write(DecimalHolder h); + + /** + * @deprecated + * The holder version should be used instead because the plain value version does not contain enough information + * to fully specify this field type. 
+ * @see #write(DecimalHolder) + */ + @Deprecated + public void writeDecimal(long start, ArrowBuf buffer); + + public void writeDecimal(long start, ArrowBuf buffer, ArrowType arrowType); + + public void writeDecimal(BigDecimal value); + + public void writeBigEndianBytesToDecimal(byte[] value, ArrowType arrowType); + + /** + * @deprecated + * Use either the version that additionally takes in an ArrowType or use the holder version. + * This version does not contain enough information to fully specify this field type. + * @see #writeBigEndianBytesToDecimal(byte[], ArrowType) + * @see #write(DecimalHolder) + */ + @Deprecated + public void writeBigEndianBytesToDecimal(byte[] value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DurationWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DurationWriter.java new file mode 100644 index 000000000000..128e93f19d07 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/DurationWriter.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public interface DurationWriter extends BaseWriter { + public void write(DurationHolder h); + + /** + * @deprecated + * The holder version should be used instead because the plain value version does not contain enough information + * to fully specify this field type. 
+ * @see #write(DurationHolder) + */ + @Deprecated + public void writeDuration(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/FixedSizeBinaryWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/FixedSizeBinaryWriter.java new file mode 100644 index 000000000000..696d962c94b8 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/FixedSizeBinaryWriter.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public interface FixedSizeBinaryWriter extends BaseWriter { + public void write(FixedSizeBinaryHolder h); + + /** + * @deprecated + * The holder version should be used instead because the plain value version does not contain enough information + * to fully specify this field type. 
+ * @see #write(FixedSizeBinaryHolder) + */ + @Deprecated + public void writeFixedSizeBinary(ArrowBuf buffer); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Float2Writer.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Float2Writer.java new file mode 100644 index 000000000000..69bd2961416e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Float2Writer.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface Float2Writer extends BaseWriter { + public void write(Float2Holder h); + + public void writeFloat2(short value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Float4Writer.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Float4Writer.java new file mode 100644 index 000000000000..e990876c41c2 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Float4Writer.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface Float4Writer extends BaseWriter { + public void write(Float4Holder h); + + public void writeFloat4(float value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Float8Writer.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Float8Writer.java new file mode 100644 index 000000000000..6a976e74c7ac --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/Float8Writer.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface Float8Writer extends BaseWriter { + public void write(Float8Holder h); + + public void writeFloat8(double value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntWriter.java new file mode 100644 index 000000000000..51040a39c80d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface IntWriter extends BaseWriter { + public void write(IntHolder h); + + public void writeInt(int value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntervalDayWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntervalDayWriter.java new file mode 100644 index 000000000000..2d384ca223f9 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntervalDayWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface IntervalDayWriter extends BaseWriter { + public void write(IntervalDayHolder h); + + public void writeIntervalDay(int days, int milliseconds); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntervalMonthDayNanoWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntervalMonthDayNanoWriter.java new file mode 100644 index 000000000000..a02b8945338f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntervalMonthDayNanoWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface IntervalMonthDayNanoWriter extends BaseWriter { + public void write(IntervalMonthDayNanoHolder h); + + public void writeIntervalMonthDayNano(int months, int days, long nanoseconds); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntervalYearWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntervalYearWriter.java new file mode 100644 index 000000000000..4c7bf4fe23dc --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/IntervalYearWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface IntervalYearWriter extends BaseWriter { + public void write(IntervalYearHolder h); + + public void writeIntervalYear(int value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/LargeVarBinaryWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/LargeVarBinaryWriter.java new file mode 100644 index 000000000000..04f10c2016c2 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/LargeVarBinaryWriter.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface LargeVarBinaryWriter extends BaseWriter { + public void write(LargeVarBinaryHolder h); + + public void writeLargeVarBinary(long start, long end, ArrowBuf buffer); + + public void writeLargeVarBinary(byte[] value); + + public void writeLargeVarBinary(byte[] value, int offset, int length); + + public void writeLargeVarBinary(ByteBuffer value); + + public void writeLargeVarBinary(ByteBuffer value, int offset, int length); + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/LargeVarCharWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/LargeVarCharWriter.java new file mode 100644 index 000000000000..a3bdae62af49 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/LargeVarCharWriter.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface LargeVarCharWriter extends BaseWriter { + public void write(LargeVarCharHolder h); + + public void writeLargeVarChar(long start, long end, ArrowBuf buffer); + + + public void writeLargeVarChar(Text value); + + public void writeLargeVarChar(String value); +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/SmallIntWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/SmallIntWriter.java new file mode 100644 index 000000000000..f444bfb535f7 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/SmallIntWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface SmallIntWriter extends BaseWriter { + public void write(SmallIntHolder h); + + public void writeSmallInt(short value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeMicroWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeMicroWriter.java new file mode 100644 index 000000000000..203ab8be3eac --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeMicroWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface TimeMicroWriter extends BaseWriter { + public void write(TimeMicroHolder h); + + public void writeTimeMicro(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeMilliWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeMilliWriter.java new file mode 100644 index 000000000000..82ea7746d6b9 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeMilliWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface TimeMilliWriter extends BaseWriter { + public void write(TimeMilliHolder h); + + public void writeTimeMilli(int value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeNanoWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeNanoWriter.java new file mode 100644 index 000000000000..d193f0974b3c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeNanoWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface TimeNanoWriter extends BaseWriter { + public void write(TimeNanoHolder h); + + public void writeTimeNano(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeSecWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeSecWriter.java new file mode 100644 index 000000000000..90e694c07a54 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeSecWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface TimeSecWriter extends BaseWriter { + public void write(TimeSecHolder h); + + public void writeTimeSec(int value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMicroTZWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMicroTZWriter.java new file mode 100644 index 000000000000..7717b554cb3f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMicroTZWriter.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public interface TimeStampMicroTZWriter extends BaseWriter { + public void write(TimeStampMicroTZHolder h); + + /** + * @deprecated + * The holder version should be used instead because the plain value version does not contain enough information + * to fully specify this field type. 
+ * @see #write(TimeStampMicroTZHolder) + */ + @Deprecated + public void writeTimeStampMicroTZ(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMicroWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMicroWriter.java new file mode 100644 index 000000000000..c00a094b1979 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMicroWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface TimeStampMicroWriter extends BaseWriter { + public void write(TimeStampMicroHolder h); + + public void writeTimeStampMicro(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMilliTZWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMilliTZWriter.java new file mode 100644 index 000000000000..c15a20e4aa61 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMilliTZWriter.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public interface TimeStampMilliTZWriter extends BaseWriter { + public void write(TimeStampMilliTZHolder h); + + /** + * @deprecated + * The holder version should be used instead because the plain value version does not contain enough information + * to fully specify this field type. 
+ * @see #write(TimeStampMilliTZHolder) + */ + @Deprecated + public void writeTimeStampMilliTZ(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMilliWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMilliWriter.java new file mode 100644 index 000000000000..375088424ee2 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampMilliWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface TimeStampMilliWriter extends BaseWriter { + public void write(TimeStampMilliHolder h); + + public void writeTimeStampMilli(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampNanoTZWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampNanoTZWriter.java new file mode 100644 index 000000000000..d3310e69c23e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampNanoTZWriter.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public interface TimeStampNanoTZWriter extends BaseWriter { + public void write(TimeStampNanoTZHolder h); + + /** + * @deprecated + * The holder version should be used instead because the plain value version does not contain enough information + * to fully specify this field type. 
+ * @see #write(TimeStampNanoTZHolder) + */ + @Deprecated + public void writeTimeStampNanoTZ(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampNanoWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampNanoWriter.java new file mode 100644 index 000000000000..c466de7a885e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampNanoWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface TimeStampNanoWriter extends BaseWriter { + public void write(TimeStampNanoHolder h); + + public void writeTimeStampNano(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampSecTZWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampSecTZWriter.java new file mode 100644 index 000000000000..76a1cbdaf721 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampSecTZWriter.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. + */ +@SuppressWarnings("unused") +public interface TimeStampSecTZWriter extends BaseWriter { + public void write(TimeStampSecTZHolder h); + + /** + * @deprecated + * The holder version should be used instead because the plain value version does not contain enough information + * to fully specify this field type. 
+ * @see #write(TimeStampSecTZHolder) + */ + @Deprecated + public void writeTimeStampSecTZ(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampSecWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampSecWriter.java new file mode 100644 index 000000000000..1d97955cc65f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TimeStampSecWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface TimeStampSecWriter extends BaseWriter { + public void write(TimeStampSecHolder h); + + public void writeTimeStampSec(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TinyIntWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TinyIntWriter.java new file mode 100644 index 000000000000..2410d58419fd --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/TinyIntWriter.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface TinyIntWriter extends BaseWriter { + public void write(TinyIntHolder h); + + public void writeTinyInt(byte value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt1Writer.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt1Writer.java new file mode 100644 index 000000000000..1beab4e5930c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt1Writer.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface UInt1Writer extends BaseWriter { + public void write(UInt1Holder h); + + public void writeUInt1(byte value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt2Writer.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt2Writer.java new file mode 100644 index 000000000000..172afed89b74 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt2Writer.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface UInt2Writer extends BaseWriter { + public void write(UInt2Holder h); + + public void writeUInt2(char value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt4Writer.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt4Writer.java new file mode 100644 index 000000000000..cc03b8d267ab --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt4Writer.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface UInt4Writer extends BaseWriter { + public void write(UInt4Holder h); + + public void writeUInt4(int value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt8Writer.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt8Writer.java new file mode 100644 index 000000000000..b05bcaae0bf0 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/UInt8Writer.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface UInt8Writer extends BaseWriter { + public void write(UInt8Holder h); + + public void writeUInt8(long value); + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/VarBinaryWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/VarBinaryWriter.java new file mode 100644 index 000000000000..8222a59db7a0 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/VarBinaryWriter.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface VarBinaryWriter extends BaseWriter { + public void write(VarBinaryHolder h); + + public void writeVarBinary(int start, int end, ArrowBuf buffer); + + public void writeVarBinary(byte[] value); + + public void writeVarBinary(byte[] value, int offset, int length); + + public void writeVarBinary(ByteBuffer value); + + public void writeVarBinary(ByteBuffer value, int offset, int length); + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/VarCharWriter.java b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/VarCharWriter.java new file mode 100644 index 000000000000..634bb9ed23bf --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/complex/writer/VarCharWriter.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.complex.writer; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + +/* + * This class is generated using FreeMarker on the ComplexWriters.java template. 
+ */ +@SuppressWarnings("unused") +public interface VarCharWriter extends BaseWriter { + public void write(VarCharHolder h); + + public void writeVarChar(int start, int end, ArrowBuf buffer); + + + public void writeVarChar(Text value); + + public void writeVarChar(String value); +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/BigIntHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/BigIntHolder.java new file mode 100644 index 000000000000..eafdb91112a4 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/BigIntHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class BigIntHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/BitHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/BitHolder.java new file mode 100644 index 000000000000..911ae26b622d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/BitHolder.java @@ -0,0 +1,93 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class BitHolder implements ValueHolder{ + + public static final int WIDTH = 1; + + public final int isSet = 1; + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DateDayHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DateDayHolder.java new file mode 100644 index 000000000000..894052b0dd82 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DateDayHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class DateDayHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public final int isSet = 1; + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DateMilliHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DateMilliHolder.java new file mode 100644 index 000000000000..9cafe50d992c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DateMilliHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class DateMilliHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Decimal256Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Decimal256Holder.java new file mode 100644 index 000000000000..81921a0a77fd --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Decimal256Holder.java @@ -0,0 +1,97 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class Decimal256Holder implements ValueHolder{ + + public static final int WIDTH = 32; + + public final int isSet = 1; + public long start; + public ArrowBuf buffer; + public int scale; + public int precision; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DecimalHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DecimalHolder.java new file mode 100644 index 000000000000..48c7f702e879 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DecimalHolder.java @@ -0,0 +1,97 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class DecimalHolder implements ValueHolder{ + + public static final int WIDTH = 16; + + public final int isSet = 1; + public long start; + public ArrowBuf buffer; + public int scale; + public int precision; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DurationHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DurationHolder.java new file mode 100644 index 000000000000..dcb2c0fcc160 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/DurationHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class DurationHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + public org.apache.arrow.vector.types.TimeUnit unit; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/FixedSizeBinaryHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/FixedSizeBinaryHolder.java new file mode 100644 index 000000000000..50c6e77756d4 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/FixedSizeBinaryHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class FixedSizeBinaryHolder implements ValueHolder{ + + public static final int WIDTH = -1; + + public final int isSet = 1; + public ArrowBuf buffer; + public int byteWidth; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Float2Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Float2Holder.java new file mode 100644 index 000000000000..d3e1dd564ec0 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Float2Holder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class Float2Holder implements ValueHolder{ + + public static final int WIDTH = 2; + + public final int isSet = 1; + public short value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Float4Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Float4Holder.java new file mode 100644 index 000000000000..4d67a4adcea4 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Float4Holder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class Float4Holder implements ValueHolder{ + + public static final int WIDTH = 4; + + public final int isSet = 1; + public float value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Float8Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Float8Holder.java new file mode 100644 index 000000000000..54b49f558f31 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/Float8Holder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class Float8Holder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public double value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntHolder.java new file mode 100644 index 000000000000..ab919ac4b66e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class IntHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public final int isSet = 1; + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntervalDayHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntervalDayHolder.java new file mode 100644 index 000000000000..f6eaa86f70ae --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntervalDayHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class IntervalDayHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public int days; + public int milliseconds; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntervalMonthDayNanoHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntervalMonthDayNanoHolder.java new file mode 100644 index 000000000000..f1c8b2302e3d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntervalMonthDayNanoHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class IntervalMonthDayNanoHolder implements ValueHolder{ + + public static final int WIDTH = 16; + + public final int isSet = 1; + public int months; + public int days; + public long nanoseconds; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntervalYearHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntervalYearHolder.java new file mode 100644 index 000000000000..19e63d9984d8 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/IntervalYearHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class IntervalYearHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public final int isSet = 1; + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/LargeVarBinaryHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/LargeVarBinaryHolder.java new file mode 100644 index 000000000000..d18dcd3eead2 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/LargeVarBinaryHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class LargeVarBinaryHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long start; + public long end; + public ArrowBuf buffer; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/LargeVarCharHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/LargeVarCharHolder.java new file mode 100644 index 000000000000..50d881059aa0 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/LargeVarCharHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class LargeVarCharHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long start; + public long end; + public ArrowBuf buffer; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableBigIntHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableBigIntHolder.java new file mode 100644 index 000000000000..27a51e995d95 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableBigIntHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableBigIntHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableBitHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableBitHolder.java new file mode 100644 index 000000000000..c1077bde6965 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableBitHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableBitHolder implements ValueHolder{ + + public static final int WIDTH = 1; + + public int isSet; + + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDateDayHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDateDayHolder.java new file mode 100644 index 000000000000..7fd9f95968ff --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDateDayHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableDateDayHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public int isSet; + + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDateMilliHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDateMilliHolder.java new file mode 100644 index 000000000000..b4b3a3474be2 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDateMilliHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableDateMilliHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDecimal256Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDecimal256Holder.java new file mode 100644 index 000000000000..e633ee81657a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDecimal256Holder.java @@ -0,0 +1,98 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableDecimal256Holder implements ValueHolder{ + + public static final int WIDTH = 32; + + public int isSet; + + public long start; + public ArrowBuf buffer; + public int scale; + public int precision; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDecimalHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDecimalHolder.java new file mode 100644 index 000000000000..a1a329a31f22 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDecimalHolder.java @@ -0,0 +1,98 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableDecimalHolder implements ValueHolder{ + + public static final int WIDTH = 16; + + public int isSet; + + public long start; + public ArrowBuf buffer; + public int scale; + public int precision; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDurationHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDurationHolder.java new file mode 100644 index 000000000000..f6f488c4226f --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableDurationHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableDurationHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + public org.apache.arrow.vector.types.TimeUnit unit; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFixedSizeBinaryHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFixedSizeBinaryHolder.java new file mode 100644 index 000000000000..3686fef814a8 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFixedSizeBinaryHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableFixedSizeBinaryHolder implements ValueHolder{ + + public static final int WIDTH = -1; + + public int isSet; + + public ArrowBuf buffer; + public int byteWidth; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFloat2Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFloat2Holder.java new file mode 100644 index 000000000000..9f78c3272c54 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFloat2Holder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableFloat2Holder implements ValueHolder{ + + public static final int WIDTH = 2; + + public int isSet; + + public short value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFloat4Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFloat4Holder.java new file mode 100644 index 000000000000..25c474d54cff --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFloat4Holder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableFloat4Holder implements ValueHolder{ + + public static final int WIDTH = 4; + + public int isSet; + + public float value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFloat8Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFloat8Holder.java new file mode 100644 index 000000000000..098083cf05a8 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableFloat8Holder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableFloat8Holder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public double value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntHolder.java new file mode 100644 index 000000000000..3f3a3c746c64 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableIntHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public int isSet; + + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntervalDayHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntervalDayHolder.java new file mode 100644 index 000000000000..c1c3e7cfcb4a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntervalDayHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableIntervalDayHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public int days; + public int milliseconds; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntervalMonthDayNanoHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntervalMonthDayNanoHolder.java new file mode 100644 index 000000000000..0bfa3016afb1 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntervalMonthDayNanoHolder.java @@ -0,0 +1,97 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableIntervalMonthDayNanoHolder implements ValueHolder{ + + public static final int WIDTH = 16; + + public int isSet; + + public int months; + public int days; + public long nanoseconds; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntervalYearHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntervalYearHolder.java new file mode 100644 index 000000000000..3bf3a5acdaa9 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableIntervalYearHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableIntervalYearHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public int isSet; + + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableLargeVarBinaryHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableLargeVarBinaryHolder.java new file mode 100644 index 000000000000..1b223974372c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableLargeVarBinaryHolder.java @@ -0,0 +1,97 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableLargeVarBinaryHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long start; + public long end; + public ArrowBuf buffer; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableLargeVarCharHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableLargeVarCharHolder.java new file mode 100644 index 000000000000..3daffdbf37dc --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableLargeVarCharHolder.java @@ -0,0 +1,97 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableLargeVarCharHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long start; + public long end; + public ArrowBuf buffer; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableSmallIntHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableSmallIntHolder.java new file mode 100644 index 000000000000..85f1b291f4fe --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableSmallIntHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableSmallIntHolder implements ValueHolder{ + + public static final int WIDTH = 2; + + public int isSet; + + public short value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeMicroHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeMicroHolder.java new file mode 100644 index 000000000000..8aba6f24b714 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeMicroHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeMicroHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeMilliHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeMilliHolder.java new file mode 100644 index 000000000000..2dd246863a8a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeMilliHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeMilliHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public int isSet; + + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeNanoHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeNanoHolder.java new file mode 100644 index 000000000000..1c51cea42390 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeNanoHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeNanoHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeSecHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeSecHolder.java new file mode 100644 index 000000000000..9e7c3a447716 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeSecHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeSecHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public int isSet; + + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMicroHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMicroHolder.java new file mode 100644 index 000000000000..c57517438a28 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMicroHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeStampMicroHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMicroTZHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMicroTZHolder.java new file mode 100644 index 000000000000..7eae2ec8f8ad --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMicroTZHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeStampMicroTZHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + public String timezone; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMilliHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMilliHolder.java new file mode 100644 index 000000000000..3f2cb3b82936 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMilliHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeStampMilliHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMilliTZHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMilliTZHolder.java new file mode 100644 index 000000000000..9204dcfa3ae1 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampMilliTZHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeStampMilliTZHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + public String timezone; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampNanoHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampNanoHolder.java new file mode 100644 index 000000000000..38a8b3c9c6af --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampNanoHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeStampNanoHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampNanoTZHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampNanoTZHolder.java new file mode 100644 index 000000000000..75ab8a2a602e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampNanoTZHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeStampNanoTZHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + public String timezone; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampSecHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampSecHolder.java new file mode 100644 index 000000000000..33bf2acd986d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampSecHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeStampSecHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampSecTZHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampSecTZHolder.java new file mode 100644 index 000000000000..5384081de93b --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTimeStampSecTZHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTimeStampSecTZHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + public String timezone; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTinyIntHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTinyIntHolder.java new file mode 100644 index 000000000000..85020dae5f3e --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableTinyIntHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableTinyIntHolder implements ValueHolder{ + + public static final int WIDTH = 1; + + public int isSet; + + public byte value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt1Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt1Holder.java new file mode 100644 index 000000000000..0367927eef29 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt1Holder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableUInt1Holder implements ValueHolder{ + + public static final int WIDTH = 1; + + public int isSet; + + public byte value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt2Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt2Holder.java new file mode 100644 index 000000000000..d4cff07ccb94 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt2Holder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableUInt2Holder implements ValueHolder{ + + public static final int WIDTH = 2; + + public int isSet; + + public char value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt4Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt4Holder.java new file mode 100644 index 000000000000..023d1bbc3458 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt4Holder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableUInt4Holder implements ValueHolder{ + + public static final int WIDTH = 4; + + public int isSet; + + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt8Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt8Holder.java new file mode 100644 index 000000000000..153446ccf555 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableUInt8Holder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableUInt8Holder implements ValueHolder{ + + public static final int WIDTH = 8; + + public int isSet; + + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableVarBinaryHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableVarBinaryHolder.java new file mode 100644 index 000000000000..80763fdb3e52 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableVarBinaryHolder.java @@ -0,0 +1,97 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableVarBinaryHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public int isSet; + + public int start; + public int end; + public ArrowBuf buffer; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableVarCharHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableVarCharHolder.java new file mode 100644 index 000000000000..4e32d6749b54 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/NullableVarCharHolder.java @@ -0,0 +1,97 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class NullableVarCharHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public int isSet; + + public int start; + public int end; + public ArrowBuf buffer; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/SmallIntHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/SmallIntHolder.java new file mode 100644 index 000000000000..c98b906fb902 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/SmallIntHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class SmallIntHolder implements ValueHolder{ + + public static final int WIDTH = 2; + + public final int isSet = 1; + public short value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeMicroHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeMicroHolder.java new file mode 100644 index 000000000000..23dd586e06a1 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeMicroHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeMicroHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeMilliHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeMilliHolder.java new file mode 100644 index 000000000000..461a30218202 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeMilliHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeMilliHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public final int isSet = 1; + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeNanoHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeNanoHolder.java new file mode 100644 index 000000000000..cd9dd943cb2d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeNanoHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeNanoHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeSecHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeSecHolder.java new file mode 100644 index 000000000000..e1836a595b33 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeSecHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeSecHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public final int isSet = 1; + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMicroHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMicroHolder.java new file mode 100644 index 000000000000..f4e83b0bac87 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMicroHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeStampMicroHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMicroTZHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMicroTZHolder.java new file mode 100644 index 000000000000..0f8766838ae6 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMicroTZHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeStampMicroTZHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + public String timezone; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMilliHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMilliHolder.java new file mode 100644 index 000000000000..8d25d240544d --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMilliHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeStampMilliHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMilliTZHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMilliTZHolder.java new file mode 100644 index 000000000000..6387904eebbe --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampMilliTZHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeStampMilliTZHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + public String timezone; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampNanoHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampNanoHolder.java new file mode 100644 index 000000000000..faefeee0f174 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampNanoHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeStampNanoHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampNanoTZHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampNanoTZHolder.java new file mode 100644 index 000000000000..bb5aa5fab2b7 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampNanoTZHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeStampNanoTZHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + public String timezone; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampSecHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampSecHolder.java new file mode 100644 index 000000000000..e5072a80f2c7 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampSecHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeStampSecHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampSecTZHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampSecTZHolder.java new file mode 100644 index 000000000000..19b30d6af0d7 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TimeStampSecTZHolder.java @@ -0,0 +1,95 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TimeStampSecTZHolder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + public String timezone; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TinyIntHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TinyIntHolder.java new file mode 100644 index 000000000000..c91e3dcb0ab3 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/TinyIntHolder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class TinyIntHolder implements ValueHolder{ + + public static final int WIDTH = 1; + + public final int isSet = 1; + public byte value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt1Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt1Holder.java new file mode 100644 index 000000000000..668f1d322272 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt1Holder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class UInt1Holder implements ValueHolder{ + + public static final int WIDTH = 1; + + public final int isSet = 1; + public byte value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt2Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt2Holder.java new file mode 100644 index 000000000000..20eae90feca2 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt2Holder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class UInt2Holder implements ValueHolder{ + + public static final int WIDTH = 2; + + public final int isSet = 1; + public char value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt4Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt4Holder.java new file mode 100644 index 000000000000..75891aff733c --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt4Holder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class UInt4Holder implements ValueHolder{ + + public static final int WIDTH = 4; + + public final int isSet = 1; + public int value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt8Holder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt8Holder.java new file mode 100644 index 000000000000..b9fed1b3139a --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/UInt8Holder.java @@ -0,0 +1,94 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class UInt8Holder implements ValueHolder{ + + public static final int WIDTH = 8; + + public final int isSet = 1; + public long value; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/VarBinaryHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/VarBinaryHolder.java new file mode 100644 index 000000000000..b097b05741c9 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/VarBinaryHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class VarBinaryHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public final int isSet = 1; + public int start; + public int end; + public ArrowBuf buffer; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/holders/VarCharHolder.java b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/VarCharHolder.java new file mode 100644 index 000000000000..d8117b6b40b2 --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/holders/VarCharHolder.java @@ -0,0 +1,96 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.holders; + + +import static org.apache.arrow.util.Preconditions.checkArgument; +import static org.apache.arrow.util.Preconditions.checkState; + +import com.google.flatbuffers.FlatBufferBuilder; + +import org.apache.arrow.memory.*; +import org.apache.arrow.util.Preconditions; +import org.apache.arrow.vector.types.Types; +import org.apache.arrow.vector.types.Types.*; +import org.apache.arrow.vector.types.pojo.*; +import org.apache.arrow.vector.types.pojo.ArrowType.*; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.*; +import org.apache.arrow.vector.holders.*; +import org.apache.arrow.vector.util.*; +import org.apache.arrow.vector.complex.*; +import org.apache.arrow.vector.complex.reader.*; +import org.apache.arrow.vector.complex.impl.*; +import org.apache.arrow.vector.complex.writer.*; +import org.apache.arrow.vector.complex.writer.BaseWriter.StructWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.ListWriter; +import org.apache.arrow.vector.complex.writer.BaseWriter.MapWriter; +import org.apache.arrow.vector.util.JsonStringArrayList; + +import java.util.Arrays; +import java.util.Random; +import java.util.List; + +import java.io.Closeable; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.ByteBuffer; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.LocalDateTime; +import java.time.Period; +import java.time.ZonedDateTime; + + + +/** + * Source code generated using FreeMarker template ValueHolders.java + */ +public final class VarCharHolder implements ValueHolder{ + + public static final int WIDTH = 4; + + public final int isSet = 1; + public int start; + public int end; + public ArrowBuf buffer; + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. 
+ */ + public int hashCode(){ + throw new UnsupportedOperationException(); + } + + /** + * Reason for not supporting the operation is that ValueHolders are potential scalar + * replacements and hence we don't want any methods to be invoked on them. + */ + public String toString(){ + throw new UnsupportedOperationException(); + } + + + + +} + + diff --git a/java/vector/target/generated-sources/org/apache/arrow/vector/types/pojo/ArrowType.java b/java/vector/target/generated-sources/org/apache/arrow/vector/types/pojo/ArrowType.java new file mode 100644 index 000000000000..451a699296ad --- /dev/null +++ b/java/vector/target/generated-sources/org/apache/arrow/vector/types/pojo/ArrowType.java @@ -0,0 +1,1526 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.arrow.vector.types.pojo; + +import com.google.flatbuffers.FlatBufferBuilder; + +import java.util.Objects; + +import org.apache.arrow.flatbuf.Type; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.vector.types.*; +import org.apache.arrow.vector.FieldVector; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +/** + * Arrow types + * Source code generated using FreeMarker template ArrowType.java + **/ +@JsonTypeInfo( + use = JsonTypeInfo.Id.NAME, + include = JsonTypeInfo.As.PROPERTY, + property = "name") +@JsonSubTypes({ + @JsonSubTypes.Type(value = ArrowType.Null.class, name = "null"), + @JsonSubTypes.Type(value = ArrowType.Struct.class, name = "struct"), + @JsonSubTypes.Type(value = ArrowType.List.class, name = "list"), + @JsonSubTypes.Type(value = ArrowType.LargeList.class, name = "largelist"), + @JsonSubTypes.Type(value = ArrowType.FixedSizeList.class, name = "fixedsizelist"), + @JsonSubTypes.Type(value = ArrowType.Union.class, name = "union"), + @JsonSubTypes.Type(value = ArrowType.Map.class, name = "map"), + @JsonSubTypes.Type(value = ArrowType.Int.class, name = "int"), + @JsonSubTypes.Type(value = ArrowType.FloatingPoint.class, name = "floatingpoint"), + @JsonSubTypes.Type(value = ArrowType.Utf8.class, name = "utf8"), + @JsonSubTypes.Type(value = ArrowType.LargeUtf8.class, name = "largeutf8"), + @JsonSubTypes.Type(value = ArrowType.Binary.class, name = "binary"), + @JsonSubTypes.Type(value = ArrowType.LargeBinary.class, name = "largebinary"), + @JsonSubTypes.Type(value = ArrowType.FixedSizeBinary.class, name = "fixedsizebinary"), + @JsonSubTypes.Type(value = ArrowType.Bool.class, name = "bool"), + @JsonSubTypes.Type(value = ArrowType.Decimal.class, name = "decimal"), + 
@JsonSubTypes.Type(value = ArrowType.Date.class, name = "date"), + @JsonSubTypes.Type(value = ArrowType.Time.class, name = "time"), + @JsonSubTypes.Type(value = ArrowType.Timestamp.class, name = "timestamp"), + @JsonSubTypes.Type(value = ArrowType.Interval.class, name = "interval"), + @JsonSubTypes.Type(value = ArrowType.Duration.class, name = "duration"), +}) +public abstract class ArrowType { + + public static abstract class PrimitiveType extends ArrowType { + + private PrimitiveType() { + } + + @Override + public boolean isComplex() { + return false; + } + } + + public static abstract class ComplexType extends ArrowType { + + private ComplexType() { + } + + @Override + public boolean isComplex() { + return true; + } + } + + public static enum ArrowTypeID { + Null(Type.Null), + Struct(Type.Struct_), + List(Type.List), + LargeList(Type.LargeList), + FixedSizeList(Type.FixedSizeList), + Union(Type.Union), + Map(Type.Map), + Int(Type.Int), + FloatingPoint(Type.FloatingPoint), + Utf8(Type.Utf8), + LargeUtf8(Type.LargeUtf8), + Binary(Type.Binary), + LargeBinary(Type.LargeBinary), + FixedSizeBinary(Type.FixedSizeBinary), + Bool(Type.Bool), + Decimal(Type.Decimal), + Date(Type.Date), + Time(Type.Time), + Timestamp(Type.Timestamp), + Interval(Type.Interval), + Duration(Type.Duration), + NONE(Type.NONE); + + private final byte flatbufType; + + public byte getFlatbufID() { + return this.flatbufType; + } + + private ArrowTypeID(byte flatbufType) { + this.flatbufType = flatbufType; + } + } + + @JsonIgnore + public abstract ArrowTypeID getTypeID(); + @JsonIgnore + public abstract boolean isComplex(); + public abstract int getType(FlatBufferBuilder builder); + public abstract T accept(ArrowTypeVisitor visitor); + + /** + * to visit the ArrowTypes + * + * type.accept(new ArrowTypeVisitor<Type>() { + * ... 
+ * }); + * + */ + public static interface ArrowTypeVisitor { + T visit(Null type); + T visit(Struct type); + T visit(List type); + T visit(LargeList type); + T visit(FixedSizeList type); + T visit(Union type); + T visit(Map type); + T visit(Int type); + T visit(FloatingPoint type); + T visit(Utf8 type); + T visit(LargeUtf8 type); + T visit(Binary type); + T visit(LargeBinary type); + T visit(FixedSizeBinary type); + T visit(Bool type); + T visit(Decimal type); + T visit(Date type); + T visit(Time type); + T visit(Timestamp type); + T visit(Interval type); + T visit(Duration type); + default T visit(ExtensionType type) { + return type.storageType().accept(this); + } + } + + /** + * to visit the Complex ArrowTypes and bundle Primitive ones in one case + */ + public static abstract class ComplexTypeVisitor implements ArrowTypeVisitor { + + public T visit(PrimitiveType type) { + throw new UnsupportedOperationException("Unexpected Primitive type: " + type); + } + + public final T visit(Null type) { + return visit((PrimitiveType) type); + } + public final T visit(Int type) { + return visit((PrimitiveType) type); + } + public final T visit(FloatingPoint type) { + return visit((PrimitiveType) type); + } + public final T visit(Utf8 type) { + return visit((PrimitiveType) type); + } + public final T visit(LargeUtf8 type) { + return visit((PrimitiveType) type); + } + public final T visit(Binary type) { + return visit((PrimitiveType) type); + } + public final T visit(LargeBinary type) { + return visit((PrimitiveType) type); + } + public final T visit(FixedSizeBinary type) { + return visit((PrimitiveType) type); + } + public final T visit(Bool type) { + return visit((PrimitiveType) type); + } + public final T visit(Decimal type) { + return visit((PrimitiveType) type); + } + public final T visit(Date type) { + return visit((PrimitiveType) type); + } + public final T visit(Time type) { + return visit((PrimitiveType) type); + } + public final T visit(Timestamp type) { + return 
visit((PrimitiveType) type); + } + public final T visit(Interval type) { + return visit((PrimitiveType) type); + } + public final T visit(Duration type) { + return visit((PrimitiveType) type); + } + } + + /** + * to visit the Primitive ArrowTypes and bundle Complex ones under one case + */ + public static abstract class PrimitiveTypeVisitor implements ArrowTypeVisitor { + + public T visit(ComplexType type) { + throw new UnsupportedOperationException("Unexpected Complex type: " + type); + } + + public final T visit(Struct type) { + return visit((ComplexType) type); + } + public final T visit(List type) { + return visit((ComplexType) type); + } + public final T visit(LargeList type) { + return visit((ComplexType) type); + } + public final T visit(FixedSizeList type) { + return visit((ComplexType) type); + } + public final T visit(Union type) { + return visit((ComplexType) type); + } + public final T visit(Map type) { + return visit((ComplexType) type); + } + } + + public static class Null extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Null; + public static final Null INSTANCE = new Null(); + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Null.startNull(builder); + return org.apache.arrow.flatbuf.Null.endNull(builder); + } + + public String toString() { + return "Null" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Null)) { + return false; + } + return true; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Struct extends ComplexType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Struct; + public static final Struct INSTANCE = new Struct(); + + @Override + public ArrowTypeID getTypeID() { + return 
TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Struct_.startStruct_(builder); + return org.apache.arrow.flatbuf.Struct_.endStruct_(builder); + } + + public String toString() { + return "Struct" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Struct)) { + return false; + } + return true; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class List extends ComplexType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.List; + public static final List INSTANCE = new List(); + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.List.startList(builder); + return org.apache.arrow.flatbuf.List.endList(builder); + } + + public String toString() { + return "List" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof List)) { + return false; + } + return true; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class LargeList extends ComplexType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.LargeList; + public static final LargeList INSTANCE = new LargeList(); + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.LargeList.startLargeList(builder); + return org.apache.arrow.flatbuf.LargeList.endLargeList(builder); + } + + public String toString() { + return "LargeList" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {}); + } + + 
@Override + public boolean equals(Object obj) { + if (!(obj instanceof LargeList)) { + return false; + } + return true; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class FixedSizeList extends ComplexType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.FixedSizeList; + + int listSize; + + + @JsonCreator + public FixedSizeList( + @JsonProperty("listSize") int listSize + ) { + this.listSize = listSize; + } + + public int getListSize() { + return listSize; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.FixedSizeList.startFixedSizeList(builder); + org.apache.arrow.flatbuf.FixedSizeList.addListSize(builder, this.listSize); + return org.apache.arrow.flatbuf.FixedSizeList.endFixedSizeList(builder); + } + + public String toString() { + return "FixedSizeList" + + "(" + + listSize + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {listSize}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof FixedSizeList)) { + return false; + } + FixedSizeList that = (FixedSizeList) obj; + return Objects.deepEquals(this.listSize, that.listSize) ; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Union extends ComplexType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Union; + + UnionMode mode; + int[] typeIds; + + + @JsonCreator + public Union( + @JsonProperty("mode") UnionMode mode, + @JsonProperty("typeIds") int[] typeIds + ) { + this.mode = mode; + this.typeIds = typeIds; + } + + public UnionMode getMode() { + return mode; + } + public int[] getTypeIds() { + return typeIds; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + 
int typeIds = this.typeIds == null ? -1 : org.apache.arrow.flatbuf.Union.createTypeIdsVector(builder, this.typeIds); + org.apache.arrow.flatbuf.Union.startUnion(builder); + org.apache.arrow.flatbuf.Union.addMode(builder, this.mode.getFlatbufID()); + if (this.typeIds != null) { + org.apache.arrow.flatbuf.Union.addTypeIds(builder, typeIds); + } + return org.apache.arrow.flatbuf.Union.endUnion(builder); + } + + public String toString() { + return "Union" + + "(" + + mode + ", " + + java.util.Arrays.toString(typeIds) + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {mode, typeIds}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Union)) { + return false; + } + Union that = (Union) obj; + return Objects.deepEquals(this.mode, that.mode) && +Objects.deepEquals(this.typeIds, that.typeIds) ; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Map extends ComplexType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Map; + + boolean keysSorted; + + + @JsonCreator + public Map( + @JsonProperty("keysSorted") boolean keysSorted + ) { + this.keysSorted = keysSorted; + } + + public boolean getKeysSorted() { + return keysSorted; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Map.startMap(builder); + org.apache.arrow.flatbuf.Map.addKeysSorted(builder, this.keysSorted); + return org.apache.arrow.flatbuf.Map.endMap(builder); + } + + public String toString() { + return "Map" + + "(" + + keysSorted + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {keysSorted}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Map)) { + return false; + } + Map that = (Map) obj; + return Objects.deepEquals(this.keysSorted, 
that.keysSorted) ; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Int extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Int; + + int bitWidth; + boolean isSigned; + + + @JsonCreator + public Int( + @JsonProperty("bitWidth") int bitWidth, + @JsonProperty("isSigned") boolean isSigned + ) { + this.bitWidth = bitWidth; + this.isSigned = isSigned; + } + + public int getBitWidth() { + return bitWidth; + } + public boolean getIsSigned() { + return isSigned; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Int.startInt(builder); + org.apache.arrow.flatbuf.Int.addBitWidth(builder, this.bitWidth); + org.apache.arrow.flatbuf.Int.addIsSigned(builder, this.isSigned); + return org.apache.arrow.flatbuf.Int.endInt(builder); + } + + public String toString() { + return "Int" + + "(" + + bitWidth + ", " + + isSigned + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {bitWidth, isSigned}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Int)) { + return false; + } + Int that = (Int) obj; + return Objects.deepEquals(this.bitWidth, that.bitWidth) && +Objects.deepEquals(this.isSigned, that.isSigned) ; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class FloatingPoint extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.FloatingPoint; + + FloatingPointPrecision precision; + + + @JsonCreator + public FloatingPoint( + @JsonProperty("precision") FloatingPointPrecision precision + ) { + this.precision = precision; + } + + public FloatingPointPrecision getPrecision() { + return precision; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + 
public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.FloatingPoint.startFloatingPoint(builder); + org.apache.arrow.flatbuf.FloatingPoint.addPrecision(builder, this.precision.getFlatbufID()); + return org.apache.arrow.flatbuf.FloatingPoint.endFloatingPoint(builder); + } + + public String toString() { + return "FloatingPoint" + + "(" + + precision + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {precision}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof FloatingPoint)) { + return false; + } + FloatingPoint that = (FloatingPoint) obj; + return Objects.deepEquals(this.precision, that.precision) ; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Utf8 extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Utf8; + public static final Utf8 INSTANCE = new Utf8(); + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Utf8.startUtf8(builder); + return org.apache.arrow.flatbuf.Utf8.endUtf8(builder); + } + + public String toString() { + return "Utf8" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Utf8)) { + return false; + } + return true; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class LargeUtf8 extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.LargeUtf8; + public static final LargeUtf8 INSTANCE = new LargeUtf8(); + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.LargeUtf8.startLargeUtf8(builder); + 
return org.apache.arrow.flatbuf.LargeUtf8.endLargeUtf8(builder); + } + + public String toString() { + return "LargeUtf8" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof LargeUtf8)) { + return false; + } + return true; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Binary extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Binary; + public static final Binary INSTANCE = new Binary(); + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Binary.startBinary(builder); + return org.apache.arrow.flatbuf.Binary.endBinary(builder); + } + + public String toString() { + return "Binary" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Binary)) { + return false; + } + return true; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class LargeBinary extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.LargeBinary; + public static final LargeBinary INSTANCE = new LargeBinary(); + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.LargeBinary.startLargeBinary(builder); + return org.apache.arrow.flatbuf.LargeBinary.endLargeBinary(builder); + } + + public String toString() { + return "LargeBinary" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof LargeBinary)) { + 
return false; + } + return true; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class FixedSizeBinary extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.FixedSizeBinary; + + int byteWidth; + + + @JsonCreator + public FixedSizeBinary( + @JsonProperty("byteWidth") int byteWidth + ) { + this.byteWidth = byteWidth; + } + + public int getByteWidth() { + return byteWidth; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.FixedSizeBinary.startFixedSizeBinary(builder); + org.apache.arrow.flatbuf.FixedSizeBinary.addByteWidth(builder, this.byteWidth); + return org.apache.arrow.flatbuf.FixedSizeBinary.endFixedSizeBinary(builder); + } + + public String toString() { + return "FixedSizeBinary" + + "(" + + byteWidth + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {byteWidth}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof FixedSizeBinary)) { + return false; + } + FixedSizeBinary that = (FixedSizeBinary) obj; + return Objects.deepEquals(this.byteWidth, that.byteWidth) ; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Bool extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Bool; + public static final Bool INSTANCE = new Bool(); + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Bool.startBool(builder); + return org.apache.arrow.flatbuf.Bool.endBool(builder); + } + + public String toString() { + return "Bool" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {}); + } + + @Override + public boolean equals(Object 
obj) { + if (!(obj instanceof Bool)) { + return false; + } + return true; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Decimal extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Decimal; + + int precision; + int scale; + int bitWidth; + + + // Needed to support golden file integration tests. + @JsonCreator + public static Decimal createDecimal( + @JsonProperty("precision") int precision, + @JsonProperty("scale") int scale, + @JsonProperty("bitWidth") Integer bitWidth) { + + return new Decimal(precision, scale, bitWidth == null ? 128 : bitWidth); + } + + /** + * Construct Decimal with 128 bits. + * + * This is kept mainly for the sake of backward compatibility. + * Please use {@link org.apache.arrow.vector.types.pojo.ArrowType.Decimal#Decimal(int, int, int)} instead. + * + * @deprecated This API will be removed in a future release. + */ + @Deprecated + public Decimal(int precision, int scale) { + this(precision, scale, 128); + } + + public Decimal( + @JsonProperty("precision") int precision, + @JsonProperty("scale") int scale, + @JsonProperty("bitWidth") int bitWidth + ) { + this.precision = precision; + this.scale = scale; + this.bitWidth = bitWidth; + } + + public int getPrecision() { + return precision; + } + public int getScale() { + return scale; + } + public int getBitWidth() { + return bitWidth; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Decimal.startDecimal(builder); + org.apache.arrow.flatbuf.Decimal.addPrecision(builder, this.precision); + org.apache.arrow.flatbuf.Decimal.addScale(builder, this.scale); + org.apache.arrow.flatbuf.Decimal.addBitWidth(builder, this.bitWidth); + return org.apache.arrow.flatbuf.Decimal.endDecimal(builder); + } + + public String toString() { + return "Decimal" + + "(" + + precision + ", " + + scale 
+ ", " + + bitWidth + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {precision, scale, bitWidth}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Decimal)) { + return false; + } + Decimal that = (Decimal) obj; + return Objects.deepEquals(this.precision, that.precision) && +Objects.deepEquals(this.scale, that.scale) && +Objects.deepEquals(this.bitWidth, that.bitWidth) ; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Date extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Date; + + DateUnit unit; + + + @JsonCreator + public Date( + @JsonProperty("unit") DateUnit unit + ) { + this.unit = unit; + } + + public DateUnit getUnit() { + return unit; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Date.startDate(builder); + org.apache.arrow.flatbuf.Date.addUnit(builder, this.unit.getFlatbufID()); + return org.apache.arrow.flatbuf.Date.endDate(builder); + } + + public String toString() { + return "Date" + + "(" + + unit + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {unit}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Date)) { + return false; + } + Date that = (Date) obj; + return Objects.deepEquals(this.unit, that.unit) ; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Time extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Time; + + TimeUnit unit; + int bitWidth; + + + @JsonCreator + public Time( + @JsonProperty("unit") TimeUnit unit, + @JsonProperty("bitWidth") int bitWidth + ) { + this.unit = unit; + this.bitWidth = bitWidth; + } + + public TimeUnit getUnit() 
{ + return unit; + } + public int getBitWidth() { + return bitWidth; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Time.startTime(builder); + org.apache.arrow.flatbuf.Time.addUnit(builder, this.unit.getFlatbufID()); + org.apache.arrow.flatbuf.Time.addBitWidth(builder, this.bitWidth); + return org.apache.arrow.flatbuf.Time.endTime(builder); + } + + public String toString() { + return "Time" + + "(" + + unit + ", " + + bitWidth + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {unit, bitWidth}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Time)) { + return false; + } + Time that = (Time) obj; + return Objects.deepEquals(this.unit, that.unit) && +Objects.deepEquals(this.bitWidth, that.bitWidth) ; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Timestamp extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Timestamp; + + TimeUnit unit; + String timezone; + + + @JsonCreator + public Timestamp( + @JsonProperty("unit") TimeUnit unit, + @JsonProperty("timezone") String timezone + ) { + this.unit = unit; + this.timezone = timezone; + } + + public TimeUnit getUnit() { + return unit; + } + public String getTimezone() { + return timezone; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + int timezone = this.timezone == null ? 
-1 : builder.createString(this.timezone); + org.apache.arrow.flatbuf.Timestamp.startTimestamp(builder); + org.apache.arrow.flatbuf.Timestamp.addUnit(builder, this.unit.getFlatbufID()); + if (this.timezone != null) { + org.apache.arrow.flatbuf.Timestamp.addTimezone(builder, timezone); + } + return org.apache.arrow.flatbuf.Timestamp.endTimestamp(builder); + } + + public String toString() { + return "Timestamp" + + "(" + + unit + ", " + + timezone + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {unit, timezone}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Timestamp)) { + return false; + } + Timestamp that = (Timestamp) obj; + return Objects.deepEquals(this.unit, that.unit) && +Objects.deepEquals(this.timezone, that.timezone) ; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Interval extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Interval; + + IntervalUnit unit; + + + @JsonCreator + public Interval( + @JsonProperty("unit") IntervalUnit unit + ) { + this.unit = unit; + } + + public IntervalUnit getUnit() { + return unit; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Interval.startInterval(builder); + org.apache.arrow.flatbuf.Interval.addUnit(builder, this.unit.getFlatbufID()); + return org.apache.arrow.flatbuf.Interval.endInterval(builder); + } + + public String toString() { + return "Interval" + + "(" + + unit + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {unit}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Interval)) { + return false; + } + Interval that = (Interval) obj; + return Objects.deepEquals(this.unit, that.unit) ; + } + + @Override + public 
T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + public static class Duration extends PrimitiveType { + public static final ArrowTypeID TYPE_TYPE = ArrowTypeID.Duration; + + TimeUnit unit; + + + @JsonCreator + public Duration( + @JsonProperty("unit") TimeUnit unit + ) { + this.unit = unit; + } + + public TimeUnit getUnit() { + return unit; + } + + @Override + public ArrowTypeID getTypeID() { + return TYPE_TYPE; + } + + @Override + public int getType(FlatBufferBuilder builder) { + org.apache.arrow.flatbuf.Duration.startDuration(builder); + org.apache.arrow.flatbuf.Duration.addUnit(builder, this.unit.getFlatbufID()); + return org.apache.arrow.flatbuf.Duration.endDuration(builder); + } + + public String toString() { + return "Duration" + + "(" + + unit + + ")" + ; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {unit}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Duration)) { + return false; + } + Duration that = (Duration) obj; + return Objects.deepEquals(this.unit, that.unit) ; + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + + /** + * A user-defined data type that wraps an underlying storage type. + */ + public abstract static class ExtensionType extends ComplexType { + /** The on-wire type for this user-defined type. */ + public abstract ArrowType storageType(); + /** The name of this user-defined type. Used to identify the type during serialization. */ + public abstract String extensionName(); + /** Check equality of this type to another user-defined type. */ + public abstract boolean extensionEquals(ExtensionType other); + /** Save any metadata for this type. */ + public abstract String serialize(); + /** Given saved metadata and the underlying storage type, construct a new instance of the user type. 
*/ + public abstract ArrowType deserialize(ArrowType storageType, String serializedData); + /** Construct a vector for the user type. */ + public abstract FieldVector getNewVector(String name, FieldType fieldType, BufferAllocator allocator); + + /** The field metadata key storing the name of the extension type. */ + public static final String EXTENSION_METADATA_KEY_NAME = "ARROW:extension:name"; + /** The field metadata key storing metadata for the extension type. */ + public static final String EXTENSION_METADATA_KEY_METADATA = "ARROW:extension:metadata"; + + @Override + public ArrowTypeID getTypeID() { + return storageType().getTypeID(); + } + + @Override + public int getType(FlatBufferBuilder builder) { + return storageType().getType(builder); + } + + public String toString() { + return "ExtensionType(" + extensionName() + ", " + storageType().toString() + ")"; + } + + @Override + public int hashCode() { + return java.util.Arrays.deepHashCode(new Object[] {storageType(), extensionName()}); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof ExtensionType)) { + return false; + } + return this.extensionEquals((ExtensionType) obj); + } + + @Override + public T accept(ArrowTypeVisitor visitor) { + return visitor.visit(this); + } + } + + private static final int defaultDecimalBitWidth = 128; + + public static org.apache.arrow.vector.types.pojo.ArrowType getTypeForField(org.apache.arrow.flatbuf.Field field) { + switch(field.typeType()) { + case Type.Null: { + org.apache.arrow.flatbuf.Null nullType = (org.apache.arrow.flatbuf.Null) field.type(new org.apache.arrow.flatbuf.Null()); + return new Null(); + } + case Type.Struct_: { + org.apache.arrow.flatbuf.Struct_ struct_Type = (org.apache.arrow.flatbuf.Struct_) field.type(new org.apache.arrow.flatbuf.Struct_()); + return new Struct(); + } + case Type.List: { + org.apache.arrow.flatbuf.List listType = (org.apache.arrow.flatbuf.List) field.type(new org.apache.arrow.flatbuf.List()); + return new 
List(); + } + case Type.LargeList: { + org.apache.arrow.flatbuf.LargeList largelistType = (org.apache.arrow.flatbuf.LargeList) field.type(new org.apache.arrow.flatbuf.LargeList()); + return new LargeList(); + } + case Type.FixedSizeList: { + org.apache.arrow.flatbuf.FixedSizeList fixedsizelistType = (org.apache.arrow.flatbuf.FixedSizeList) field.type(new org.apache.arrow.flatbuf.FixedSizeList()); + int listSize = fixedsizelistType.listSize(); + return new FixedSizeList(listSize); + } + case Type.Union: { + org.apache.arrow.flatbuf.Union unionType = (org.apache.arrow.flatbuf.Union) field.type(new org.apache.arrow.flatbuf.Union()); + short mode = unionType.mode(); + int[] typeIds = new int[unionType.typeIdsLength()]; + for (int i = 0; i< typeIds.length; ++i) { + typeIds[i] = unionType.typeIds(i); + } + return new Union(UnionMode.fromFlatbufID(mode), typeIds); + } + case Type.Map: { + org.apache.arrow.flatbuf.Map mapType = (org.apache.arrow.flatbuf.Map) field.type(new org.apache.arrow.flatbuf.Map()); + boolean keysSorted = mapType.keysSorted(); + return new Map(keysSorted); + } + case Type.Int: { + org.apache.arrow.flatbuf.Int intType = (org.apache.arrow.flatbuf.Int) field.type(new org.apache.arrow.flatbuf.Int()); + int bitWidth = intType.bitWidth(); + boolean isSigned = intType.isSigned(); + return new Int(bitWidth, isSigned); + } + case Type.FloatingPoint: { + org.apache.arrow.flatbuf.FloatingPoint floatingpointType = (org.apache.arrow.flatbuf.FloatingPoint) field.type(new org.apache.arrow.flatbuf.FloatingPoint()); + short precision = floatingpointType.precision(); + return new FloatingPoint(FloatingPointPrecision.fromFlatbufID(precision)); + } + case Type.Utf8: { + org.apache.arrow.flatbuf.Utf8 utf8Type = (org.apache.arrow.flatbuf.Utf8) field.type(new org.apache.arrow.flatbuf.Utf8()); + return new Utf8(); + } + case Type.LargeUtf8: { + org.apache.arrow.flatbuf.LargeUtf8 largeutf8Type = (org.apache.arrow.flatbuf.LargeUtf8) field.type(new 
org.apache.arrow.flatbuf.LargeUtf8()); + return new LargeUtf8(); + } + case Type.Binary: { + org.apache.arrow.flatbuf.Binary binaryType = (org.apache.arrow.flatbuf.Binary) field.type(new org.apache.arrow.flatbuf.Binary()); + return new Binary(); + } + case Type.LargeBinary: { + org.apache.arrow.flatbuf.LargeBinary largebinaryType = (org.apache.arrow.flatbuf.LargeBinary) field.type(new org.apache.arrow.flatbuf.LargeBinary()); + return new LargeBinary(); + } + case Type.FixedSizeBinary: { + org.apache.arrow.flatbuf.FixedSizeBinary fixedsizebinaryType = (org.apache.arrow.flatbuf.FixedSizeBinary) field.type(new org.apache.arrow.flatbuf.FixedSizeBinary()); + int byteWidth = fixedsizebinaryType.byteWidth(); + return new FixedSizeBinary(byteWidth); + } + case Type.Bool: { + org.apache.arrow.flatbuf.Bool boolType = (org.apache.arrow.flatbuf.Bool) field.type(new org.apache.arrow.flatbuf.Bool()); + return new Bool(); + } + case Type.Decimal: { + org.apache.arrow.flatbuf.Decimal decimalType = (org.apache.arrow.flatbuf.Decimal) field.type(new org.apache.arrow.flatbuf.Decimal()); + int precision = decimalType.precision(); + int scale = decimalType.scale(); + int bitWidth = decimalType.bitWidth(); + if (bitWidth != defaultDecimalBitWidth && bitWidth != 256) { + throw new IllegalArgumentException("Library only supports 128-bit and 256-bit decimal values"); + } + return new Decimal(precision, scale, bitWidth); + } + case Type.Date: { + org.apache.arrow.flatbuf.Date dateType = (org.apache.arrow.flatbuf.Date) field.type(new org.apache.arrow.flatbuf.Date()); + short unit = dateType.unit(); + return new Date(DateUnit.fromFlatbufID(unit)); + } + case Type.Time: { + org.apache.arrow.flatbuf.Time timeType = (org.apache.arrow.flatbuf.Time) field.type(new org.apache.arrow.flatbuf.Time()); + short unit = timeType.unit(); + int bitWidth = timeType.bitWidth(); + return new Time(TimeUnit.fromFlatbufID(unit), bitWidth); + } + case Type.Timestamp: { + org.apache.arrow.flatbuf.Timestamp 
timestampType = (org.apache.arrow.flatbuf.Timestamp) field.type(new org.apache.arrow.flatbuf.Timestamp()); + short unit = timestampType.unit(); + String timezone = timestampType.timezone(); + return new Timestamp(TimeUnit.fromFlatbufID(unit), timezone); + } + case Type.Interval: { + org.apache.arrow.flatbuf.Interval intervalType = (org.apache.arrow.flatbuf.Interval) field.type(new org.apache.arrow.flatbuf.Interval()); + short unit = intervalType.unit(); + return new Interval(IntervalUnit.fromFlatbufID(unit)); + } + case Type.Duration: { + org.apache.arrow.flatbuf.Duration durationType = (org.apache.arrow.flatbuf.Duration) field.type(new org.apache.arrow.flatbuf.Duration()); + short unit = durationType.unit(); + return new Duration(TimeUnit.fromFlatbufID(unit)); + } + default: + throw new UnsupportedOperationException("Unsupported type: " + field.typeType()); + } + } + + public static Int getInt(org.apache.arrow.flatbuf.Field field) { + org.apache.arrow.flatbuf.Int intType = (org.apache.arrow.flatbuf.Int) field.type(new org.apache.arrow.flatbuf.Int()); + return new Int(intType.bitWidth(), intType.isSigned()); + } +} + + diff --git a/java/vector/target/test-classes/logback.xml b/java/vector/target/test-classes/logback.xml new file mode 100644 index 000000000000..f9e449fa67b2 --- /dev/null +++ b/java/vector/target/test-classes/logback.xml @@ -0,0 +1,28 @@ + + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + + diff --git a/python/cmake_modules b/python/cmake_modules deleted file mode 120000 index 76e2a8d12c5e..000000000000 --- a/python/cmake_modules +++ /dev/null @@ -1 +0,0 @@ -../cpp/cmake_modules \ No newline at end of file diff --git a/python/cmake_modules/BuildUtils.cmake b/python/cmake_modules/BuildUtils.cmake new file mode 100644 index 000000000000..d9e06b1657bc --- /dev/null +++ b/python/cmake_modules/BuildUtils.cmake @@ -0,0 +1,984 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor 
license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Common path suffixes to be searched by find_library or find_path. +# Windows artifacts may be found under "/Library", so +# search there as well. +set(ARROW_LIBRARY_PATH_SUFFIXES + "${CMAKE_LIBRARY_ARCHITECTURE}" + "lib/${CMAKE_LIBRARY_ARCHITECTURE}" + "lib64" + "lib32" + "lib" + "bin" + "Library" + "Library/lib" + "Library/bin") +set(ARROW_INCLUDE_PATH_SUFFIXES "include" "Library" "Library/include") + +function(add_thirdparty_lib LIB_NAME LIB_TYPE LIB) + set(options) + set(one_value_args) + set(multi_value_args DEPS INCLUDE_DIRECTORIES) + cmake_parse_arguments(ARG + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN}) + if(ARG_UNPARSED_ARGUMENTS) + message(SEND_ERROR "Error: unrecognized arguments: ${ARG_UNPARSED_ARGUMENTS}") + endif() + + add_library(${LIB_NAME} ${LIB_TYPE} IMPORTED) + if(${LIB_TYPE} STREQUAL "STATIC") + set_target_properties(${LIB_NAME} PROPERTIES IMPORTED_LOCATION "${LIB}") + message(STATUS "Added static library dependency ${LIB_NAME}: ${LIB}") + else() + if(WIN32) + # Mark the ".lib" location as part of a Windows DLL + set_target_properties(${LIB_NAME} PROPERTIES IMPORTED_IMPLIB "${LIB}") + else() + set_target_properties(${LIB_NAME} PROPERTIES IMPORTED_LOCATION "${LIB}") + endif() + message(STATUS "Added shared library 
dependency ${LIB_NAME}: ${LIB}") + endif() + if(ARG_DEPS) + set_target_properties(${LIB_NAME} PROPERTIES INTERFACE_LINK_LIBRARIES "${ARG_DEPS}") + endif() + if(ARG_INCLUDE_DIRECTORIES) + set_target_properties(${LIB_NAME} PROPERTIES INTERFACE_INCLUDE_DIRECTORIES + "${ARG_INCLUDE_DIRECTORIES}") + endif() +endfunction() + +# Based on MIT-licensed +# https://gist.github.com/cristianadam/ef920342939a89fae3e8a85ca9459b49 +function(arrow_create_merged_static_lib output_target) + set(options) + set(one_value_args NAME ROOT) + set(multi_value_args TO_MERGE) + cmake_parse_arguments(ARG + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN}) + if(ARG_UNPARSED_ARGUMENTS) + message(SEND_ERROR "Error: unrecognized arguments: ${ARG_UNPARSED_ARGUMENTS}") + endif() + + file(MAKE_DIRECTORY ${BUILD_OUTPUT_ROOT_DIRECTORY}) + set(output_lib_path + ${BUILD_OUTPUT_ROOT_DIRECTORY}${CMAKE_STATIC_LIBRARY_PREFIX}${ARG_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX} + ) + + set(all_library_paths $) + foreach(lib ${ARG_TO_MERGE}) + list(APPEND all_library_paths $) + endforeach() + + if(APPLE) + # Get the version string from a libtool binary. + function(get_libtool_version item result_var) + execute_process(COMMAND "${item}" -V + OUTPUT_VARIABLE _version + OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_QUIET) + set(${result_var} + "${_version}" + PARENT_SCOPE) + endfunction() + + # Validator function to confirm that the libtool is Apple's libtool. + # The apple-distributed libtool is what we want for bundling, but there is + # a GNU libtool that has a name collision (and happens to be bundled with R, too). + # We are not compatible with GNU libtool, so we need to avoid it. 
+ function(validate_apple_libtool result_var item) + get_libtool_version("${item}" libtool_version) + if(NOT "${libtool_version}" MATCHES ".*cctools.+([0-9.]+).*") + set(${result_var} + FALSE + PARENT_SCOPE) + endif() + endfunction() + + if(CMAKE_LIBTOOL) + set(LIBTOOL_MACOS ${CMAKE_LIBTOOL}) + # Validate that CMAKE_LIBTOOL is Apple's libtool + validate_apple_libtool(is_apple_libtool "${LIBTOOL_MACOS}") + if(NOT is_apple_libtool) + get_libtool_version("${LIBTOOL_MACOS}" _libtool_version_output) + message(FATAL_ERROR "CMAKE_LIBTOOL does not appear to be Apple's libtool: ${LIBTOOL_MACOS}\nlibtool -V output: ${_libtool_version_output}" + ) + endif() + else() + # Check in the obvious places first to find Apple's libtool + # HINTS is used before system paths and before PATHS, so we use that + # even though hard coded paths should go in PATHS + find_program(LIBTOOL_MACOS libtool + HINTS /usr/bin /Library/Developer/CommandLineTools/usr/bin VALIDATOR + validate_apple_libtool) + if(NOT LIBTOOL_MACOS) + # Find any libtool (without validation) to show its version in the error + find_program(_any_libtool libtool) + if(_any_libtool) + get_libtool_version("${_any_libtool}" _libtool_version_output) + endif() + message(FATAL_ERROR "Could not find Apple's libtool. GNU libtool is not compatible." 
+ "\nFound libtool: ${_any_libtool}" + "\nlibtool -V output: ${_libtool_version_output}") + endif() + endif() + + set(BUNDLE_COMMAND ${LIBTOOL_MACOS} "-no_warning_for_no_symbols" "-static" "-o" + ${output_lib_path} ${all_library_paths}) + elseif(MSVC) + if(CMAKE_LIBTOOL) + set(BUNDLE_TOOL ${CMAKE_LIBTOOL}) + else() + find_program(BUNDLE_TOOL lib HINTS "${CMAKE_CXX_COMPILER}/..") + if(NOT BUNDLE_TOOL) + message(FATAL_ERROR "Cannot locate lib.exe to bundle libraries") + endif() + endif() + set(BUNDLE_COMMAND ${BUNDLE_TOOL} /NOLOGO /OUT:${output_lib_path} + ${all_library_paths}) + elseif(CMAKE_CXX_COMPILER_ID MATCHES "^(Clang|GNU|Intel|IntelLLVM)$") + set(ar_script_path ${CMAKE_BINARY_DIR}/${ARG_NAME}.ar) + + file(WRITE ${ar_script_path}.in "CREATE ${output_lib_path}\n") + file(APPEND ${ar_script_path}.in "ADDLIB $\n") + + foreach(lib ${ARG_TO_MERGE}) + file(APPEND ${ar_script_path}.in "ADDLIB $\n") + endforeach() + + file(APPEND ${ar_script_path}.in "SAVE\nEND\n") + file(GENERATE + OUTPUT ${ar_script_path} + INPUT ${ar_script_path}.in) + set(ar_tool ${CMAKE_AR}) + + if(CMAKE_INTERPROCEDURAL_OPTIMIZATION) + set(ar_tool ${CMAKE_CXX_COMPILER_AR}) + endif() + + set(BUNDLE_COMMAND ${ar_tool} -M < ${ar_script_path}) + else() + message(FATAL_ERROR "Unknown bundle scenario!") + endif() + + add_custom_target(${output_target}_merge ALL + ${BUNDLE_COMMAND} + DEPENDS ${ARG_ROOT} ${ARG_TO_MERGE} + BYPRODUCTS ${output_lib_path} + COMMENT "Bundling ${output_lib_path}" + VERBATIM) + + message(STATUS "Creating bundled static library target ${output_target} at ${output_lib_path}" + ) + + add_library(${output_target} STATIC IMPORTED GLOBAL) + set_target_properties(${output_target} PROPERTIES IMPORTED_LOCATION ${output_lib_path}) + add_dependencies(${output_target} ${output_target}_merge) +endfunction() + +function(arrow_install_cmake_package PACKAGE_NAME EXPORT_NAME) + set(CONFIG_CMAKE "${PACKAGE_NAME}Config.cmake") + set(BUILT_CONFIG_CMAKE 
"${CMAKE_CURRENT_BINARY_DIR}/${CONFIG_CMAKE}") + configure_package_config_file("${CONFIG_CMAKE}.in" "${BUILT_CONFIG_CMAKE}" + INSTALL_DESTINATION "${ARROW_CMAKE_DIR}/${PACKAGE_NAME}") + set(CONFIG_VERSION_CMAKE "${PACKAGE_NAME}ConfigVersion.cmake") + set(BUILT_CONFIG_VERSION_CMAKE "${CMAKE_CURRENT_BINARY_DIR}/${CONFIG_VERSION_CMAKE}") + write_basic_package_version_file("${BUILT_CONFIG_VERSION_CMAKE}" + COMPATIBILITY SameMajorVersion) + install(FILES "${BUILT_CONFIG_CMAKE}" "${BUILT_CONFIG_VERSION_CMAKE}" + DESTINATION "${ARROW_CMAKE_DIR}/${PACKAGE_NAME}") + set(TARGETS_CMAKE "${PACKAGE_NAME}Targets.cmake") + install(EXPORT ${EXPORT_NAME} + DESTINATION "${ARROW_CMAKE_DIR}/${PACKAGE_NAME}" + NAMESPACE "${PACKAGE_NAME}::" + FILE "${TARGETS_CMAKE}") +endfunction() + +# \arg OUTPUTS list to append built targets to +function(ADD_ARROW_LIB LIB_NAME) + set(options) + set(one_value_args + BUILD_SHARED + BUILD_STATIC + CMAKE_PACKAGE_NAME + INSTALL_ARCHIVE_DIR + INSTALL_LIBRARY_DIR + INSTALL_RUNTIME_DIR + PKG_CONFIG_NAME + SHARED_LINK_FLAGS) + set(multi_value_args + SOURCES + OUTPUTS + STATIC_LINK_LIBS + SHARED_LINK_LIBS + SHARED_PRIVATE_LINK_LIBS + EXTRA_INCLUDES + PRIVATE_INCLUDES + DEPENDENCIES + DEFINITIONS + SHARED_INSTALL_INTERFACE_LIBS + STATIC_INSTALL_INTERFACE_LIBS + OUTPUT_PATH) + cmake_parse_arguments(ARG + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN}) + if(ARG_UNPARSED_ARGUMENTS) + message(SEND_ERROR "Error: unrecognized arguments: ${ARG_UNPARSED_ARGUMENTS}") + endif() + + if(ARG_OUTPUTS) + set(${ARG_OUTPUTS}) + endif() + + # Allow overriding ARROW_BUILD_SHARED and ARROW_BUILD_STATIC + if(DEFINED ARG_BUILD_SHARED) + set(BUILD_SHARED ${ARG_BUILD_SHARED}) + else() + set(BUILD_SHARED ${ARROW_BUILD_SHARED}) + endif() + if(DEFINED ARG_BUILD_STATIC) + set(BUILD_STATIC ${ARG_BUILD_STATIC}) + else() + set(BUILD_STATIC ${ARROW_BUILD_STATIC}) + endif() + if(ARG_OUTPUT_PATH) + set(OUTPUT_PATH ${ARG_OUTPUT_PATH}) + else() + set(OUTPUT_PATH 
${BUILD_OUTPUT_ROOT_DIRECTORY}) + endif() + + if(WIN32 + OR CMAKE_GENERATOR STREQUAL Xcode + OR NOT ARROW_POSITION_INDEPENDENT_CODE) + # We need to compile C++ separately for each library kind (shared and static) + # because of dllexport declarations on Windows. + # The Xcode generator doesn't reliably work with Xcode as target names are not + # guessed correctly. + set(USE_OBJLIB OFF) + else() + set(USE_OBJLIB ON) + endif() + + if(USE_OBJLIB) + # Generate a single "objlib" from all C++ modules and link + # that "objlib" into each library kind, to avoid compiling twice + add_library(${LIB_NAME}_objlib OBJECT ${ARG_SOURCES}) + # Necessary to make static linking into other shared libraries work properly + set_property(TARGET ${LIB_NAME}_objlib PROPERTY POSITION_INDEPENDENT_CODE ON) + if(ARG_DEPENDENCIES) + add_dependencies(${LIB_NAME}_objlib ${ARG_DEPENDENCIES}) + endif() + if(ARG_DEFINITIONS) + target_compile_definitions(${LIB_NAME}_objlib PRIVATE ${ARG_DEFINITIONS}) + endif() + target_compile_options(${LIB_NAME}_objlib PRIVATE ${ARROW_LIBRARIES_ONLY_CXX_FLAGS}) + set(LIB_DEPS $) + set(EXTRA_DEPS) + + if(ARG_OUTPUTS) + list(APPEND ${ARG_OUTPUTS} ${LIB_NAME}_objlib) + endif() + + if(ARG_EXTRA_INCLUDES) + target_include_directories(${LIB_NAME}_objlib SYSTEM PUBLIC ${ARG_EXTRA_INCLUDES}) + endif() + if(ARG_PRIVATE_INCLUDES) + target_include_directories(${LIB_NAME}_objlib PRIVATE ${ARG_PRIVATE_INCLUDES}) + endif() + if(BUILD_SHARED) + if(ARG_SHARED_LINK_LIBS) + target_link_libraries(${LIB_NAME}_objlib PRIVATE ${ARG_SHARED_LINK_LIBS}) + endif() + if(ARG_SHARED_PRIVATE_LINK_LIBS) + target_link_libraries(${LIB_NAME}_objlib PRIVATE ${ARG_SHARED_PRIVATE_LINK_LIBS}) + endif() + endif() + if(BUILD_STATIC AND ARG_STATIC_LINK_LIBS) + target_link_libraries(${LIB_NAME}_objlib PRIVATE ${ARG_STATIC_LINK_LIBS}) + endif() + else() + # Prepare arguments for separate compilation of static and shared libs below + set(LIB_DEPS ${ARG_SOURCES}) + set(EXTRA_DEPS ${ARG_DEPENDENCIES}) + 
endif() + + if(ARG_EXTRA_INCLUDES) + set(LIB_INCLUDES ${ARG_EXTRA_INCLUDES}) + else() + set(LIB_INCLUDES "") + endif() + + if(ARG_INSTALL_ARCHIVE_DIR) + set(INSTALL_ARCHIVE_DIR ${ARG_INSTALL_ARCHIVE_DIR}) + else() + set(INSTALL_ARCHIVE_DIR ${CMAKE_INSTALL_LIBDIR}) + endif() + if(ARG_INSTALL_LIBRARY_DIR) + set(INSTALL_LIBRARY_DIR ${ARG_INSTALL_LIBRARY_DIR}) + else() + set(INSTALL_LIBRARY_DIR ${CMAKE_INSTALL_LIBDIR}) + endif() + if(ARG_INSTALL_RUNTIME_DIR) + set(INSTALL_RUNTIME_DIR ${ARG_INSTALL_RUNTIME_DIR}) + else() + set(INSTALL_RUNTIME_DIR bin) + endif() + + if(BUILD_SHARED) + add_library(${LIB_NAME}_shared SHARED ${LIB_DEPS}) + if(EXTRA_DEPS) + add_dependencies(${LIB_NAME}_shared ${EXTRA_DEPS}) + endif() + + if(ARG_DEFINITIONS) + target_compile_definitions(${LIB_NAME}_shared PRIVATE ${ARG_DEFINITIONS}) + endif() + target_compile_options(${LIB_NAME}_shared PRIVATE ${ARROW_LIBRARIES_ONLY_CXX_FLAGS}) + + if(ARG_OUTPUTS) + list(APPEND ${ARG_OUTPUTS} ${LIB_NAME}_shared) + endif() + + if(LIB_INCLUDES) + target_include_directories(${LIB_NAME}_shared SYSTEM PUBLIC ${ARG_EXTRA_INCLUDES}) + endif() + + if(ARG_PRIVATE_INCLUDES) + target_include_directories(${LIB_NAME}_shared PRIVATE ${ARG_PRIVATE_INCLUDES}) + endif() + + # On iOS, specifying -undefined conflicts with enabling bitcode + if(APPLE + AND NOT IOS + AND NOT DEFINED ENV{EMSCRIPTEN}) + # On OS X, you can avoid linking at library load time and instead + # expecting that the symbols have been loaded separately. This happens + # with libpython* where there can be conflicts between system Python and + # the Python from a thirdparty distribution + # + # When running with the Emscripten Compiler, we need not worry about + # python, and the Emscripten Compiler does not support this option. 
+ set(ARG_SHARED_LINK_FLAGS "-undefined dynamic_lookup ${ARG_SHARED_LINK_FLAGS}") + endif() + + set_target_properties(${LIB_NAME}_shared + PROPERTIES LIBRARY_OUTPUT_DIRECTORY "${OUTPUT_PATH}" + RUNTIME_OUTPUT_DIRECTORY "${OUTPUT_PATH}" + PDB_OUTPUT_DIRECTORY "${OUTPUT_PATH}" + LINK_FLAGS "${ARG_SHARED_LINK_FLAGS}" + OUTPUT_NAME ${LIB_NAME} + VERSION "${ARROW_FULL_SO_VERSION}" + SOVERSION "${ARROW_SO_VERSION}") + + target_link_libraries(${LIB_NAME}_shared + PUBLIC "$" + "$" + PRIVATE ${ARG_SHARED_PRIVATE_LINK_LIBS}) + + if(USE_OBJLIB) + # Ensure that dependencies are built before compilation of objects in + # object library, rather than only before the final link step + foreach(SHARED_LINK_LIB ${ARG_SHARED_LINK_LIBS}) + if(TARGET ${SHARED_LINK_LIB}) + add_dependencies(${LIB_NAME}_objlib ${SHARED_LINK_LIB}) + endif() + endforeach() + endif() + + if(ARROW_RPATH_ORIGIN) + if(APPLE) + set(_lib_install_rpath "@loader_path") + else() + set(_lib_install_rpath "\$ORIGIN") + endif() + set_target_properties(${LIB_NAME}_shared PROPERTIES INSTALL_RPATH + ${_lib_install_rpath}) + endif() + + if(APPLE) + if(ARROW_INSTALL_NAME_RPATH) + set(_lib_install_name "@rpath") + else() + set(_lib_install_name "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") + endif() + set_target_properties(${LIB_NAME}_shared + PROPERTIES BUILD_WITH_INSTALL_RPATH ON INSTALL_NAME_DIR + "${_lib_install_name}") + endif() + + install(TARGETS ${LIB_NAME}_shared ${INSTALL_IS_OPTIONAL} + EXPORT ${LIB_NAME}_targets + ARCHIVE DESTINATION ${INSTALL_ARCHIVE_DIR} + LIBRARY DESTINATION ${INSTALL_LIBRARY_DIR} + RUNTIME DESTINATION ${INSTALL_RUNTIME_DIR} + INCLUDES + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + endif() + + if(BUILD_STATIC) + add_library(${LIB_NAME}_static STATIC ${LIB_DEPS}) + if(EXTRA_DEPS) + add_dependencies(${LIB_NAME}_static ${EXTRA_DEPS}) + endif() + + if(ARG_DEFINITIONS) + target_compile_definitions(${LIB_NAME}_static PRIVATE ${ARG_DEFINITIONS}) + endif() + target_compile_options(${LIB_NAME}_static 
PRIVATE ${ARROW_LIBRARIES_ONLY_CXX_FLAGS}) + + if(ARG_OUTPUTS) + list(APPEND ${ARG_OUTPUTS} ${LIB_NAME}_static) + endif() + + if(LIB_INCLUDES) + target_include_directories(${LIB_NAME}_static SYSTEM PUBLIC ${ARG_EXTRA_INCLUDES}) + endif() + + if(ARG_PRIVATE_INCLUDES) + target_include_directories(${LIB_NAME}_static PRIVATE ${ARG_PRIVATE_INCLUDES}) + endif() + + if(MSVC_TOOLCHAIN) + set(LIB_NAME_STATIC ${LIB_NAME}_static) + else() + set(LIB_NAME_STATIC ${LIB_NAME}) + endif() + + if(WIN32) + target_compile_definitions(${LIB_NAME}_static PUBLIC ARROW_STATIC) + target_compile_definitions(${LIB_NAME}_static PUBLIC ARROW_FLIGHT_STATIC) + target_compile_definitions(${LIB_NAME}_static PUBLIC ARROW_FLIGHT_SQL_STATIC) + endif() + + set_target_properties(${LIB_NAME}_static + PROPERTIES LIBRARY_OUTPUT_DIRECTORY "${OUTPUT_PATH}" + OUTPUT_NAME ${LIB_NAME_STATIC}) + + if(ARG_STATIC_INSTALL_INTERFACE_LIBS) + target_link_libraries(${LIB_NAME}_static + INTERFACE "$" + ) + endif() + + if(ARG_STATIC_LINK_LIBS) + target_link_libraries(${LIB_NAME}_static + PUBLIC "$") + if(USE_OBJLIB) + # Ensure that dependencies are built before compilation of objects in + # object library, rather than only before the final link step + foreach(STATIC_LINK_LIB ${ARG_STATIC_LINK_LIBS}) + if(TARGET ${STATIC_LINK_LIB}) + add_dependencies(${LIB_NAME}_objlib ${STATIC_LINK_LIB}) + endif() + endforeach() + endif() + endif() + + install(TARGETS ${LIB_NAME}_static ${INSTALL_IS_OPTIONAL} + EXPORT ${LIB_NAME}_targets + ARCHIVE DESTINATION ${INSTALL_ARCHIVE_DIR} + LIBRARY DESTINATION ${INSTALL_LIBRARY_DIR} + RUNTIME DESTINATION ${INSTALL_RUNTIME_DIR} + INCLUDES + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + endif() + + if(ARG_CMAKE_PACKAGE_NAME) + arrow_install_cmake_package(${ARG_CMAKE_PACKAGE_NAME} ${LIB_NAME}_targets) + endif() + + if(ARG_PKG_CONFIG_NAME) + arrow_add_pkg_config("${ARG_PKG_CONFIG_NAME}") + endif() + + # Modify variable in calling scope + if(ARG_OUTPUTS) + set(${ARG_OUTPUTS} + ${${ARG_OUTPUTS}} + 
PARENT_SCOPE) + endif() +endfunction() + +# +# Benchmarking +# +# Add a new micro benchmark, with or without an executable that should be built. +# If benchmarks are enabled then they will be run along side unit tests with ctest. +# 'make benchmark' and 'make unittest' to build/run only benchmark or unittests, +# respectively. +# +# REL_BENCHMARK_NAME is the name of the benchmark app. It may be a single component +# (e.g. monotime-benchmark) or contain additional components (e.g. +# net/net_util-benchmark). Either way, the last component must be a globally +# unique name. + +# The benchmark will registered as unit test with ctest with a label +# of 'benchmark'. +# +# Arguments after the test name will be passed to set_tests_properties(). +# +# \arg PREFIX a string to append to the name of the benchmark executable. For +# example, if you have src/arrow/foo/bar-benchmark.cc, then PREFIX "foo" will +# create test executable foo-bar-benchmark +# \arg LABELS the benchmark label or labels to assign the unit tests to. By +# default, benchmarks will go in the "benchmark" group. 
# Custom targets for the
# group names must exist
function(ADD_BENCHMARK REL_BENCHMARK_NAME)
  set(options)
  set(one_value_args PREFIX)
  set(multi_value_args
      EXTRA_LINK_LIBS
      STATIC_LINK_LIBS
      DEPENDENCIES
      SOURCES
      EXTRA_SOURCES
      LABELS)
  cmake_parse_arguments(ARG
                        "${options}"
                        "${one_value_args}"
                        "${multi_value_args}"
                        ${ARGN})
  if(ARG_UNPARSED_ARGUMENTS)
    message(SEND_ERROR "Error: unrecognized arguments: ${ARG_UNPARSED_ARGUMENTS}")
  endif()

  # Benchmarks can be globally disabled; nothing to do in that case.
  if(NO_BENCHMARKS)
    return()
  endif()
  get_filename_component(BENCHMARK_NAME ${REL_BENCHMARK_NAME} NAME_WE)

  if(ARG_PREFIX)
    set(BENCHMARK_NAME "${ARG_PREFIX}-${BENCHMARK_NAME}")
  endif()

  set(SOURCES "")

  if(ARG_EXTRA_SOURCES)
    list(APPEND SOURCES ${ARG_EXTRA_SOURCES})
  endif()

  # Explicit SOURCES replaces the default "<name>.cc" convention.
  if(ARG_SOURCES)
    list(APPEND SOURCES ${ARG_SOURCES})
  else()
    list(APPEND SOURCES "${REL_BENCHMARK_NAME}.cc")
  endif()

  # Make sure the executable name contains only hyphens, not underscores
  string(REPLACE "_" "-" BENCHMARK_NAME ${BENCHMARK_NAME})

  if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${REL_BENCHMARK_NAME}.cc)
    # This benchmark has a corresponding .cc file, set it up as an executable.
    set(BENCHMARK_PATH "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${BENCHMARK_NAME}")
    add_executable(${BENCHMARK_NAME} ${SOURCES})

    if(ARG_STATIC_LINK_LIBS)
      # Customize link libraries
      target_link_libraries(${BENCHMARK_NAME} PRIVATE ${ARG_STATIC_LINK_LIBS})
    else()
      target_link_libraries(${BENCHMARK_NAME} PRIVATE ${ARROW_BENCHMARK_LINK_LIBS})
    endif()
    add_dependencies(benchmark ${BENCHMARK_NAME})

    if(ARG_EXTRA_LINK_LIBS)
      target_link_libraries(${BENCHMARK_NAME} PRIVATE ${ARG_EXTRA_LINK_LIBS})
    endif()
  else()
    # No executable, just invoke the benchmark (probably a script) directly.
    set(BENCHMARK_PATH ${CMAKE_CURRENT_SOURCE_DIR}/${REL_BENCHMARK_NAME})
  endif()

  # With OSX and conda, we need to set the correct RPATH so that dependencies
  # are found. The installed libraries with conda have an RPATH that matches
  # for executables and libraries lying in $ENV{CONDA_PREFIX}/bin or
  # $ENV{CONDA_PREFIX}/lib but our test libraries and executables are not
  # installed there.
  if(NOT "$ENV{CONDA_PREFIX}" STREQUAL "" AND APPLE)
    set_target_properties(${BENCHMARK_NAME}
                          PROPERTIES BUILD_WITH_INSTALL_RPATH TRUE
                                     INSTALL_RPATH_USE_LINK_PATH TRUE
                                     INSTALL_RPATH
                                     "$ENV{CONDA_PREFIX}/lib;${CMAKE_RUNTIME_OUTPUT_DIRECTORY}"
    )
  endif()

  # Add test as dependency of relevant label targets
  add_dependencies(all-benchmarks ${BENCHMARK_NAME})
  foreach(TARGET ${ARG_LABELS})
    add_dependencies(${TARGET} ${BENCHMARK_NAME})
  endforeach()

  if(ARG_DEPENDENCIES)
    add_dependencies(${BENCHMARK_NAME} ${ARG_DEPENDENCIES})
  endif()

  # Every benchmark carries the "benchmark" label in addition to any custom ones.
  if(ARG_LABELS)
    set(ARG_LABELS "benchmark;${ARG_LABELS}")
  else()
    set(ARG_LABELS benchmark)
  endif()

  if(ARROW_BUILD_DETAILED_BENCHMARKS)
    target_compile_definitions(${BENCHMARK_NAME} PRIVATE ARROW_BUILD_DETAILED_BENCHMARKS)
  endif()

  # Benchmarks are registered as ctest tests driven by run-test.sh.
  add_test(${BENCHMARK_NAME}
           ${BUILD_SUPPORT_DIR}/run-test.sh
           ${CMAKE_BINARY_DIR}
           benchmark
           ${BENCHMARK_PATH})

  set_property(TEST ${BENCHMARK_NAME}
               APPEND
               PROPERTY LABELS ${ARG_LABELS})
endfunction()

#
# Testing
#
# Add a new test case, with or without an executable that should be built.
#
# REL_TEST_NAME is the name of the test. It may be a single component
# (e.g. monotime-test) or contain additional components (e.g.
# net/net_util-test). Either way, the last component must be a globally
# unique name.
#
# If given, SOURCES is the list of C++ source files to compile into the test
# executable. Otherwise, "REL_TEST_NAME.cc" is used.
#
# The unit test is added with a label of "unittest" to support filtering with
# ctest.
#
# Arguments after the test name will be passed to set_tests_properties().
#
# \arg ENABLED if passed, add this unit test even if ARROW_BUILD_TESTS is off
# \arg PREFIX a string to append to the name of the test executable. For
# example, if you have src/arrow/foo/bar-test.cc, then PREFIX "foo" will create
# test executable foo-bar-test
# \arg LABELS the unit test label or labels to assign the unit tests
# to. By default, unit tests will go in the "unittest" group, but if we have
# multiple unit tests in some subgroup, you can assign a test to multiple
# groups use the syntax unittest;GROUP2;GROUP3. Custom targets for the group
# names must exist
function(ADD_TEST_CASE REL_TEST_NAME)
  set(options NO_VALGRIND ENABLED)
  set(one_value_args PREFIX)
  set(multi_value_args
      SOURCES
      STATIC_LINK_LIBS
      EXTRA_LINK_LIBS
      EXTRA_INCLUDES
      EXTRA_DEPENDENCIES
      LABELS
      EXTRA_LABELS
      TEST_ARGUMENTS
      DEFINITIONS)
  cmake_parse_arguments(ARG
                        "${options}"
                        "${one_value_args}"
                        "${multi_value_args}"
                        ${ARGN})
  if(ARG_UNPARSED_ARGUMENTS)
    message(SEND_ERROR "Error: unrecognized arguments: ${ARG_UNPARSED_ARGUMENTS}")
  endif()

  # ENABLED overrides the global test switch for this one test.
  if(NO_TESTS AND NOT ARG_ENABLED)
    return()
  endif()
  get_filename_component(TEST_NAME ${REL_TEST_NAME} NAME_WE)

  if(ARG_PREFIX)
    set(TEST_NAME "${ARG_PREFIX}-${TEST_NAME}")
  endif()

  if(ARG_SOURCES)
    set(SOURCES ${ARG_SOURCES})
  else()
    set(SOURCES "${REL_TEST_NAME}.cc")
  endif()

  # Make sure the executable name contains only hyphens, not underscores
  string(REPLACE "_" "-" TEST_NAME ${TEST_NAME})

  set(TEST_PATH "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${TEST_NAME}")
  add_executable(${TEST_NAME} ${SOURCES})

  # With OSX and conda, we need to set the correct RPATH so that dependencies
  # are found. The installed libraries with conda have an RPATH that matches
  # for executables and libraries lying in $ENV{CONDA_PREFIX}/bin or
  # $ENV{CONDA_PREFIX}/lib but our test libraries and executables are not
  # installed there.
  if(NOT "$ENV{CONDA_PREFIX}" STREQUAL "" AND APPLE)
    set_target_properties(${TEST_NAME}
                          PROPERTIES BUILD_WITH_INSTALL_RPATH TRUE
                                     INSTALL_RPATH_USE_LINK_PATH TRUE
                                     INSTALL_RPATH
                                     "${CMAKE_RUNTIME_OUTPUT_DIRECTORY};$ENV{CONDA_PREFIX}/lib"
    )
  endif()

  # Ensure using bundled GoogleTest when we use bundled GoogleTest.
  # ARROW_GTEST_GTEST_HEADERS is defined only when we use bundled
  # GoogleTest.
  target_link_libraries(${TEST_NAME} PRIVATE ${ARROW_GTEST_GTEST_HEADERS})

  if(ARG_STATIC_LINK_LIBS)
    # Customize link libraries
    target_link_libraries(${TEST_NAME} PRIVATE ${ARG_STATIC_LINK_LIBS})
  else()
    target_link_libraries(${TEST_NAME} PRIVATE ${ARROW_TEST_LINK_LIBS})
  endif()

  if(ARG_EXTRA_LINK_LIBS)
    target_link_libraries(${TEST_NAME} PRIVATE ${ARG_EXTRA_LINK_LIBS})
  endif()

  if(ARG_EXTRA_INCLUDES)
    target_include_directories(${TEST_NAME} SYSTEM PUBLIC ${ARG_EXTRA_INCLUDES})
  endif()

  if(ARG_EXTRA_DEPENDENCIES)
    add_dependencies(${TEST_NAME} ${ARG_EXTRA_DEPENDENCIES})
  endif()

  if(ARG_DEFINITIONS)
    target_compile_definitions(${TEST_NAME} PRIVATE ${ARG_DEFINITIONS})
  endif()

  # Three registration modes: under valgrind, plain (Windows/Emscripten),
  # or through the run-test.sh wrapper everywhere else.
  if(ARROW_TEST_MEMCHECK AND NOT ARG_NO_VALGRIND)
    add_test(${TEST_NAME}
             bash
             -c
             "cd '${CMAKE_SOURCE_DIR}'; \
valgrind --suppressions=valgrind.supp --tool=memcheck --gen-suppressions=all \
--num-callers=500 --leak-check=full --leak-check-heuristics=stdstring \
--error-exitcode=1 ${TEST_PATH} ${ARG_TEST_ARGUMENTS}")
  elseif(WIN32 OR CMAKE_SYSTEM_NAME STREQUAL "Emscripten")
    add_test(NAME ${TEST_NAME} COMMAND ${TEST_NAME} ${ARG_TEST_ARGUMENTS})
  else()
    add_test(${TEST_NAME}
             ${BUILD_SUPPORT_DIR}/run-test.sh
             ${CMAKE_BINARY_DIR}
             test
             ${TEST_PATH}
             ${ARG_TEST_ARGUMENTS})
  endif()

  # Add test as dependency of relevant targets
  add_dependencies(all-tests ${TEST_NAME})
  foreach(TARGET ${ARG_LABELS})
    add_dependencies(${TARGET} ${TEST_NAME})
  endforeach()

  set(LABELS)
  list(APPEND LABELS "unittest")
  if(ARG_LABELS)
    list(APPEND LABELS ${ARG_LABELS})
  endif()
  # EXTRA_LABELS don't create their own dependencies, they are only used
  # to ease running certain test categories.
  if(ARG_EXTRA_LABELS)
    list(APPEND LABELS ${ARG_EXTRA_LABELS})
  endif()

  foreach(LABEL ${ARG_LABELS})
    # ensure there is a cmake target which exercises tests with this LABEL
    set(LABEL_TEST_NAME "test-${LABEL}")
    if(NOT TARGET ${LABEL_TEST_NAME})
      add_custom_target(${LABEL_TEST_NAME}
                        ctest -L "${LABEL}" --output-on-failure
                        USES_TERMINAL)
    endif()
    # ensure the test is (re)built before the LABEL test runs
    add_dependencies(${LABEL_TEST_NAME} ${TEST_NAME})
  endforeach()

  set_property(TEST ${TEST_NAME}
               APPEND
               PROPERTY LABELS ${LABELS})
endfunction()

#
# Examples
#
# Add a new example, with or without an executable that should be built.
# If examples are enabled then they will be run along side unit tests with ctest.
# 'make runexample' to build/run only examples.
#
# REL_EXAMPLE_NAME is the name of the example app. It may be a single component
# (e.g. monotime-example) or contain additional components (e.g.
# net/net_util-example). Either way, the last component must be a globally
# unique name.

# The example will registered as unit test with ctest with a label
# of 'example'.
#
# Arguments after the test name will be passed to set_tests_properties().
#
# \arg PREFIX a string to append to the name of the example executable.
# For
# example, if you have src/arrow/foo/bar-example.cc, then PREFIX "foo" will
# create test executable foo-bar-example
function(ADD_ARROW_EXAMPLE REL_EXAMPLE_NAME)
  set(options)
  set(one_value_args PREFIX)
  set(multi_value_args EXTRA_INCLUDES EXTRA_LINK_LIBS EXTRA_SOURCES DEPENDENCIES)
  cmake_parse_arguments(ARG
                        "${options}"
                        "${one_value_args}"
                        "${multi_value_args}"
                        ${ARGN})
  if(ARG_UNPARSED_ARGUMENTS)
    message(SEND_ERROR "Error: unrecognized arguments: ${ARG_UNPARSED_ARGUMENTS}")
  endif()

  if(NO_EXAMPLES)
    return()
  endif()
  get_filename_component(EXAMPLE_NAME ${REL_EXAMPLE_NAME} NAME_WE)

  if(ARG_PREFIX)
    set(EXAMPLE_NAME "${ARG_PREFIX}-${EXAMPLE_NAME}")
  endif()

  # Make sure the executable name contains only hyphens, not underscores
  string(REPLACE "_" "-" EXAMPLE_NAME ${EXAMPLE_NAME})

  if(EXISTS ${CMAKE_SOURCE_DIR}/examples/arrow/${REL_EXAMPLE_NAME}.cc)
    # This example has a corresponding .cc file, set it up as an executable.
    set(EXAMPLE_PATH "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${EXAMPLE_NAME}")
    add_executable(${EXAMPLE_NAME} "${REL_EXAMPLE_NAME}.cc" ${ARG_EXTRA_SOURCES})
    target_link_libraries(${EXAMPLE_NAME} ${ARROW_EXAMPLE_LINK_LIBS})
    add_dependencies(runexample ${EXAMPLE_NAME})

    if(ARG_EXTRA_LINK_LIBS)
      target_link_libraries(${EXAMPLE_NAME} ${ARG_EXTRA_LINK_LIBS})
    endif()
  endif()

  if(ARG_DEPENDENCIES)
    add_dependencies(${EXAMPLE_NAME} ${ARG_DEPENDENCIES})
  endif()

  if(ARG_EXTRA_INCLUDES)
    target_include_directories(${EXAMPLE_NAME} SYSTEM PUBLIC ${ARG_EXTRA_INCLUDES})
  endif()

  # NOTE(review): EXAMPLE_PATH (and the ${EXAMPLE_NAME} target) is only set
  # when the .cc file exists above; this registration appears to assume every
  # caller passes an existing source file — confirm against call sites.
  add_test(${EXAMPLE_NAME} ${EXAMPLE_PATH})
  set_tests_properties(${EXAMPLE_NAME} PROPERTIES LABELS "example")
endfunction()

#
# Fuzzing
#
# Add new fuzz target executable.
#
# The single source file must define a function:
# extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size)
#
# No main function must be present within the source file!
#
# Build a libFuzzer/OSS-Fuzz executable from a single fuzz-entry source file.
#
# \arg PREFIX a string to prepend to the fuzz target executable name
# \arg LINK_LIBS libraries to link the fuzz target against
function(ADD_FUZZ_TARGET REL_FUZZING_NAME)
  set(options)
  set(one_value_args PREFIX)
  set(multi_value_args LINK_LIBS)
  cmake_parse_arguments(ARG
                        "${options}"
                        "${one_value_args}"
                        "${multi_value_args}"
                        ${ARGN})
  if(ARG_UNPARSED_ARGUMENTS)
    message(SEND_ERROR "Error: unrecognized arguments: ${ARG_UNPARSED_ARGUMENTS}")
  endif()

  if(NOT ARROW_FUZZING)
    return()
  endif()

  get_filename_component(FUZZING_NAME ${REL_FUZZING_NAME} NAME_WE)

  # Make sure the executable name contains only hyphens, not underscores
  string(REPLACE "_" "-" FUZZING_NAME ${FUZZING_NAME})

  if(ARG_PREFIX)
    set(FUZZING_NAME "${ARG_PREFIX}-${FUZZING_NAME}")
  endif()

  # For OSS-Fuzz
  # (https://google.github.io/oss-fuzz/advanced-topics/ideal-integration/)
  if(DEFINED ENV{LIB_FUZZING_ENGINE})
    set(FUZZ_LDFLAGS $ENV{LIB_FUZZING_ENGINE})
  else()
    set(FUZZ_LDFLAGS "-fsanitize=fuzzer")
  endif()

  add_executable(${FUZZING_NAME} "${REL_FUZZING_NAME}.cc")
  # BUG FIX: cmake_parse_arguments stores the LINK_LIBS keyword's values in
  # ARG_LINK_LIBS; the previous ${LINK_LIBS} referenced an undefined variable,
  # so the declared link libraries were silently dropped.
  target_link_libraries(${FUZZING_NAME} ${ARG_LINK_LIBS})
  target_compile_options(${FUZZING_NAME} PRIVATE ${FUZZ_LDFLAGS})
  set_target_properties(${FUZZING_NAME} PROPERTIES LINK_FLAGS ${FUZZ_LDFLAGS} LABELS
                                                   "fuzzing")
endfunction()

# Install every non-"internal" header in the current source directory under
# ${CMAKE_INSTALL_INCLUDEDIR}/PATH.
#
# \arg PATTERN glob pattern(s) selecting header files (default: *.h and *.hpp)
function(ARROW_INSTALL_ALL_HEADERS PATH)
  set(options)
  set(one_value_args)
  set(multi_value_args PATTERN)
  cmake_parse_arguments(ARG
                        "${options}"
                        "${one_value_args}"
                        "${multi_value_args}"
                        ${ARGN})
  if(NOT ARG_PATTERN)
    # The .hpp extension is used by some vendored libraries
    set(ARG_PATTERN "*.h" "*.hpp")
  endif()
  file(GLOB CURRENT_DIRECTORY_HEADERS ${ARG_PATTERN})

  set(PUBLIC_HEADERS)
  foreach(HEADER ${CURRENT_DIRECTORY_HEADERS})
    get_filename_component(HEADER_BASENAME ${HEADER} NAME)
    # Headers whose name mentions "internal" are not part of the public API.
    if(HEADER_BASENAME MATCHES "internal")
      continue()
    endif()
    list(APPEND PUBLIC_HEADERS ${HEADER})
  endforeach()
  install(FILES ${PUBLIC_HEADERS} DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${PATH}")
endfunction()

# Generate and install a pkg-config file for MODULE (body continues below).
function(ARROW_ADD_PKG_CONFIG MODULE)
configure_file(${MODULE}.pc.in "${CMAKE_CURRENT_BINARY_DIR}/${MODULE}.pc.generate.in" + @ONLY) + file(GENERATE + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/$/${MODULE}.pc" + INPUT "${CMAKE_CURRENT_BINARY_DIR}/${MODULE}.pc.generate.in") + install(FILES "${CMAKE_CURRENT_BINARY_DIR}/$/${MODULE}.pc" + DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig/") +endfunction() + +# Implementations of lisp "car" and "cdr" functions +macro(ARROW_CAR var) + set(${var} ${ARGV1}) +endmacro() + +macro(ARROW_CDR var rest) + set(${var} ${ARGN}) +endmacro() diff --git a/python/cmake_modules/DefineOptions.cmake b/python/cmake_modules/DefineOptions.cmake new file mode 100644 index 000000000000..5d34ff50e35c --- /dev/null +++ b/python/cmake_modules/DefineOptions.cmake @@ -0,0 +1,773 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +macro(set_option_category name) + set(ARROW_OPTION_CATEGORY ${name}) + list(APPEND "ARROW_OPTION_CATEGORIES" ${name}) +endmacro() + +function(check_description_length name description) + foreach(description_line ${description}) + string(LENGTH ${description_line} line_length) + if(${line_length} GREATER 80) + message(FATAL_ERROR "description for ${name} contained a\n\ + line ${line_length} characters long!\n\ + (max is 80). 
Split it into more lines with semicolons") + endif() + endforeach() +endfunction() + +macro(define_option name description default) + set(options) + set(one_value_args) + set(multi_value_args DEPENDS) + cmake_parse_arguments(ARG + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN}) + if(ARG_UNPARSED_ARGUMENTS) + message(SEND_ERROR "Error: unrecognized arguments: ${ARG_UNPARSED_ARGUMENTS}") + endif() + + check_description_length(${name} ${description}) + list(JOIN description "\n" multiline_description) + + option(${name} "${multiline_description}" ${default}) + + list(APPEND "ARROW_${ARROW_OPTION_CATEGORY}_OPTION_NAMES" ${name}) + set("${name}_OPTION_DESCRIPTION" ${description}) + set("${name}_OPTION_DEFAULT" ${default}) + set("${name}_OPTION_TYPE" "bool") + set("${name}_OPTION_DEPENDS" ${ARG_DEPENDS}) +endmacro() + +macro(define_option_string name description default) + check_description_length(${name} ${description}) + list(JOIN description "\n" multiline_description) + + set(${name} + ${default} + CACHE STRING "${multiline_description}") + + list(APPEND "ARROW_${ARROW_OPTION_CATEGORY}_OPTION_NAMES" ${name}) + set("${name}_OPTION_DESCRIPTION" ${description}) + set("${name}_OPTION_DEFAULT" "\"${default}\"") + set("${name}_OPTION_TYPE" "string") + set("${name}_OPTION_POSSIBLE_VALUES" ${ARGN}) + list(FIND ${name}_OPTION_POSSIBLE_VALUES "${default}" default_value_index) + if(NOT ${default_value_index} EQUAL -1) + list(REMOVE_AT ${name}_OPTION_POSSIBLE_VALUES ${default_value_index}) + list(PREPEND ${name}_OPTION_POSSIBLE_VALUES "${default}") + endif() + list(JOIN "${name}_OPTION_POSSIBLE_VALUES" "|" "${name}_OPTION_ENUM") + if(NOT ("${${name}_OPTION_ENUM}" STREQUAL "")) + set_property(CACHE ${name} PROPERTY STRINGS "${name}_OPTION_POSSIBLE_VALUES") + endif() +endmacro() + +# Topological sort by Tarjan's algorithm. 
set(ARROW_BOOL_OPTION_DEPENDENCIES_TSORTED)
# DFS visit for the topological sort; marks nodes VISITING/VISITED to detect
# cycles among option DEPENDS declarations.
macro(tsort_bool_option_dependencies_visit option_name)
  if("${${option_name}_TSORT_STATUS}" STREQUAL "VISITING")
    message(FATAL_ERROR "Cyclic option dependency is detected: ${option_name}")
  elseif("${${option_name}_TSORT_STATUS}" STREQUAL "")
    set(${option_name}_TSORT_STATUS "VISITING")
    foreach(needed_option_name ${${option_name}_OPTION_DEPENDS})
      tsort_bool_option_dependencies_visit(${needed_option_name})
    endforeach()
    set(${option_name}_TSORT_STATUS "VISITED")
    # Prepending yields dependents-before-dependencies order.
    list(INSERT ARROW_BOOL_OPTION_DEPENDENCIES_TSORTED 0 ${option_name})
  endif()
endmacro()
# Sort all registered boolean options so that every option precedes the
# options it depends on.
macro(tsort_bool_option_dependencies)
  foreach(category ${ARROW_OPTION_CATEGORIES})
    foreach(option_name ${ARROW_${category}_OPTION_NAMES})
      if("${${option_name}_OPTION_TYPE}" STREQUAL "bool")
        if("${${option_name}_TSORT_STATUS}" STREQUAL "")
          tsort_bool_option_dependencies_visit(${option_name})
        endif()
      endif()
    endforeach()
  endforeach()
endmacro()

# Apply platform constraints, then propagate ON options to everything they
# declare in DEPENDS (in topological order, so transitive deps are enabled).
macro(resolve_option_dependencies)
  # Arrow Flight SQL ODBC is available only for Windows and macOS for now.
  if(NOT WIN32 AND NOT APPLE)
    set(ARROW_FLIGHT_SQL_ODBC OFF)
  endif()
  if(MSVC_TOOLCHAIN)
    set(ARROW_USE_GLOG OFF)
  endif()
  # Tests are crashed with mold + sanitizer checks.
  if(ARROW_USE_ASAN
     OR ARROW_USE_TSAN
     OR ARROW_USE_UBSAN)
    if(ARROW_USE_MOLD)
      message(WARNING "ARROW_USE_MOLD is disabled when one of "
                      "ARROW_USE_ASAN, ARROW_USE_TSAN or ARROW_USE_UBSAN is specified "
                      "because it causes some problems.")
      set(ARROW_USE_MOLD OFF)
    endif()
  endif()

  tsort_bool_option_dependencies()
  foreach(option_name ${ARROW_BOOL_OPTION_DEPENDENCIES_TSORTED})
    if(${${option_name}})
      foreach(depended_option_name ${${option_name}_OPTION_DEPENDS})
        set(${depended_option_name} ON)
      endforeach()
    endif()
  endforeach()
endmacro()

# Top level cmake dir
if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_CURRENT_SOURCE_DIR}")
  set(ARROW_DEFINE_OPTIONS_DEFAULT ON)
else()
  set(ARROW_DEFINE_OPTIONS_DEFAULT OFF)
endif()
option(ARROW_DEFINE_OPTIONS "Define Arrow options" ${ARROW_DEFINE_OPTIONS_DEFAULT})
if(ARROW_DEFINE_OPTIONS)
  #----------------------------------------------------------------------
  set_option_category("Compile and link")

  define_option_string(ARROW_CXXFLAGS "Compiler flags to append when compiling Arrow" "")

  define_option(ARROW_BUILD_STATIC "Build static libraries" ON)

  define_option(ARROW_BUILD_SHARED "Build shared libraries" ON)

  define_option_string(ARROW_PACKAGE_KIND
                       "Arbitrary string that identifies the kind of package;\
(for informational purposes)" "")

  define_option_string(ARROW_GIT_ID "The Arrow git commit id (if any)" "")

  define_option_string(ARROW_GIT_DESCRIPTION "The Arrow git commit description (if any)"
                       "")

  define_option(ARROW_POSITION_INDEPENDENT_CODE
                "Whether to create position-independent target" ON)

  define_option(ARROW_USE_CCACHE "Use ccache when compiling (if available)" ON)

  define_option(ARROW_USE_SCCACHE "Use sccache when compiling (if available),;\
takes precedence over ccache if a storage backend is configured" ON)

  define_option(ARROW_USE_LLD "Use the LLVM lld for linking (if available)" OFF)

  define_option(ARROW_USE_MOLD "Use mold for linking on
Linux (if available)" OFF) + + define_option_string(ARROW_SIMD_LEVEL + "Compile-time SIMD optimization level" + "DEFAULT" # default to SSE4_2 on x86, NEON on Arm, NONE otherwise + "NONE" + "SSE4_2" + "AVX2" + "AVX512" + "NEON" + "SVE" # size agnostic SVE + "SVE128" # fixed size SVE + "SVE256" # " + "SVE512" # " + "DEFAULT") + + define_option_string(ARROW_RUNTIME_SIMD_LEVEL + "Max runtime SIMD optimization level" + "MAX" # default to max supported by compiler + "NONE" + "SSE4_2" + "AVX2" + "AVX512" + "MAX") + + define_option(ARROW_ALTIVEC "Build with Altivec if compiler has support" ON) + + define_option(ARROW_RPATH_ORIGIN "Build Arrow libraries with RATH set to \$ORIGIN" OFF) + + define_option(ARROW_INSTALL_NAME_RPATH + "Build Arrow libraries with install_name set to @rpath" ON) + + define_option(ARROW_GGDB_DEBUG "Pass -ggdb flag to debug builds" ON) + + define_option(ARROW_WITH_MUSL "Whether the system libc is musl or not" OFF) + + define_option(ARROW_ENABLE_THREADING "Enable threading in Arrow core" ON) + + #---------------------------------------------------------------------- + set_option_category("Tests and benchmarks") + + define_option(ARROW_BUILD_EXAMPLES "Build the Arrow examples" OFF) + + define_option(ARROW_BUILD_TESTS + "Build the Arrow googletest unit tests" + OFF + DEPENDS + ARROW_IPC + ARROW_TESTING) + + define_option(ARROW_ENABLE_TIMING_TESTS "Enable timing-sensitive tests" ON) + + define_option(ARROW_BUILD_INTEGRATION + "Build the Arrow integration test executables" + OFF + DEPENDS + ARROW_TESTING) + + define_option(ARROW_BUILD_BENCHMARKS + "Build the Arrow micro benchmarks" + OFF + DEPENDS + ARROW_IPC + ARROW_TESTING) + + # Reference benchmarks are used to compare to naive implementation, or + # discover various hardware limits. 
+ define_option(ARROW_BUILD_BENCHMARKS_REFERENCE + "Build the Arrow micro reference benchmarks" OFF) + + define_option(ARROW_BUILD_DETAILED_BENCHMARKS + "Build benchmarks that do a longer exploration of performance" OFF) + + if(ARROW_BUILD_SHARED) + set(ARROW_TEST_LINKAGE_DEFAULT "shared") + else() + set(ARROW_TEST_LINKAGE_DEFAULT "static") + endif() + + define_option_string(ARROW_TEST_LINKAGE + "Linkage of Arrow libraries with unit tests executables." + "${ARROW_TEST_LINKAGE_DEFAULT}" + "shared" + "static") + + define_option(ARROW_BUILD_FUZZING_UTILITIES + "Build command line utilities for fuzzing" + OFF + DEPENDS + ARROW_TESTING + ARROW_WITH_BROTLI + ARROW_WITH_LZ4 + ARROW_WITH_ZSTD) + + define_option(ARROW_FUZZING + "Build Arrow fuzz targets" + OFF + DEPENDS + ARROW_BUILD_FUZZING_UTILITIES) + + define_option(ARROW_LARGE_MEMORY_TESTS "Enable unit tests which use large memory" OFF) + + #---------------------------------------------------------------------- + set_option_category("Coverage") + + define_option(ARROW_GENERATE_COVERAGE "Build with C++ code coverage enabled" OFF) + + #---------------------------------------------------------------------- + set_option_category("Checks") + + define_option(ARROW_TEST_MEMCHECK "Run the test suite using valgrind --tool=memcheck" + OFF) + + define_option(ARROW_USE_ASAN "Enable Address Sanitizer checks" OFF) + + define_option(ARROW_USE_TSAN "Enable Thread Sanitizer checks" OFF) + + define_option(ARROW_USE_UBSAN "Enable Undefined Behavior sanitizer checks" OFF) + + #---------------------------------------------------------------------- + set_option_category("Project component") + + define_option(ARROW_ACERO + "Build the Arrow Acero Engine Module" + OFF + DEPENDS + ARROW_COMPUTE + ARROW_IPC) + + define_option(ARROW_AZURE + "Build Arrow with Azure support (requires the Azure SDK for C++)" + OFF + DEPENDS + ARROW_FILESYSTEM) + + define_option(ARROW_BUILD_UTILITIES "Build Arrow command line utilities" OFF) + + 
define_option(ARROW_COMPUTE "Build all Arrow Compute kernels" OFF) + + define_option(ARROW_CSV "Build the Arrow CSV Parser Module" OFF) + + define_option(ARROW_CUDA + "Build the Arrow CUDA extensions (requires CUDA toolkit)" + OFF + DEPENDS + ARROW_IPC) + + define_option(ARROW_DATASET + "Build the Arrow Dataset Modules" + OFF + DEPENDS + ARROW_ACERO + ARROW_FILESYSTEM) + + define_option(ARROW_FILESYSTEM "Build the Arrow Filesystem Layer" OFF) + + define_option(ARROW_FLIGHT + "Build the Arrow Flight RPC System (requires GRPC, Protocol Buffers)" + OFF + DEPENDS + ARROW_IPC) + + define_option(ARROW_FLIGHT_SQL + "Build the Arrow Flight SQL extension" + OFF + DEPENDS + ARROW_FLIGHT) + + define_option(ARROW_FLIGHT_SQL_ODBC + "Build the Arrow Flight SQL ODBC extension" + OFF + DEPENDS + ARROW_FLIGHT_SQL + ARROW_COMPUTE) + + define_option(ARROW_GANDIVA + "Build the Gandiva libraries" + OFF + DEPENDS + ARROW_WITH_RE2 + ARROW_WITH_UTF8PROC) + + define_option(ARROW_GCS + "Build Arrow with GCS support (requires the Google Cloud Platform " + "C++ Client Libraries)" + OFF + DEPENDS + ARROW_FILESYSTEM) + + define_option(ARROW_HDFS + "Build the Arrow HDFS bridge" + OFF + DEPENDS + ARROW_FILESYSTEM) + + define_option(ARROW_IPC "Build the Arrow IPC extensions" ON) + + define_option(ARROW_JEMALLOC "Build the Arrow jemalloc-based allocator" OFF) + + define_option(ARROW_JSON "Build Arrow with JSON support (requires RapidJSON)" OFF) + + define_option(ARROW_MIMALLOC "Build the Arrow mimalloc-based allocator" ON) + + define_option(ARROW_PARQUET + "Build the Parquet libraries" + OFF + DEPENDS + ARROW_IPC) + + define_option(ARROW_ORC + "Build the Arrow ORC adapter" + OFF + DEPENDS + ARROW_WITH_LZ4 + ARROW_WITH_SNAPPY + ARROW_WITH_ZLIB + ARROW_WITH_ZSTD) + + define_option(ARROW_PYTHON + "Build some components needed by PyArrow.;\ +(This is a deprecated option. 
Use CMake presets instead.)" + OFF + DEPENDS + ARROW_CSV + ARROW_DATASET + ARROW_FILESYSTEM + ARROW_HDFS + ARROW_JSON) + + define_option(ARROW_S3 + "Build Arrow with S3 support (requires the AWS SDK for C++)" + OFF + DEPENDS + ARROW_FILESYSTEM) + + define_option(ARROW_S3_MODULE + "Build the Arrow S3 filesystem as a dynamic module" + OFF + DEPENDS + ARROW_S3) + + define_option(ARROW_SUBSTRAIT + "Build the Arrow Substrait Consumer Module" + OFF + DEPENDS + ARROW_DATASET + ARROW_IPC + ARROW_PARQUET) + + define_option(ARROW_TENSORFLOW "Build Arrow with TensorFlow support enabled" OFF) + + define_option(ARROW_TESTING + "Build the Arrow testing libraries" + OFF + DEPENDS + ARROW_JSON) + + #---------------------------------------------------------------------- + set_option_category("Thirdparty toolchain") + + # Determine how we will look for dependencies + # * AUTO: Guess which packaging systems we're running in and pull the + # dependencies from there. Build any missing ones through the + # ExternalProject setup. This is the default unless the CONDA_PREFIX + # environment variable is set, in which case the CONDA method is used + # * BUNDLED: Build dependencies through CMake's ExternalProject facility. If + # you wish to build individual dependencies from source instead of using + # one of the other methods, pass -D$NAME_SOURCE=BUNDLED + # * SYSTEM: Use CMake's find_package and find_library without any custom + # paths. If individual packages are on non-default locations, you can pass + # $NAME_ROOT arguments to CMake, or set environment variables for the same. + # If your system packages are in a non-default location, or if you are using + # a non-standard toolchain, you can also pass ARROW_PACKAGE_PREFIX to set + # the *_ROOT variables to look in that directory + # * CONDA: Same as SYSTEM but set all *_ROOT variables to + # ENV{CONDA_PREFIX}. 
If this is run within an active conda environment, + # then ENV{CONDA_PREFIX} will be used for dependencies unless + # ARROW_DEPENDENCY_SOURCE is set explicitly to one of the other options + # * VCPKG: Searches for dependencies installed by vcpkg. + # * BREW: Use SYSTEM but search for select packages with brew. + if(NOT "$ENV{CONDA_PREFIX}" STREQUAL "") + set(ARROW_DEPENDENCY_SOURCE_DEFAULT "CONDA") + else() + set(ARROW_DEPENDENCY_SOURCE_DEFAULT "AUTO") + endif() + define_option_string(ARROW_DEPENDENCY_SOURCE + "Method to use for acquiring arrow's build dependencies" + "${ARROW_DEPENDENCY_SOURCE_DEFAULT}" + "AUTO" + "BUNDLED" + "SYSTEM" + "CONDA" + "VCPKG" + "BREW") + + define_option(ARROW_VERBOSE_THIRDPARTY_BUILD + "Show output from ExternalProjects rather than just logging to files" OFF) + + define_option(ARROW_DEPENDENCY_USE_SHARED "Link to shared libraries" ON) + + define_option(ARROW_BOOST_USE_SHARED "Rely on Boost shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + define_option(ARROW_BROTLI_USE_SHARED "Rely on Brotli shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + define_option(ARROW_BZ2_USE_SHARED "Rely on Bz2 shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + define_option(ARROW_GFLAGS_USE_SHARED "Rely on GFlags shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + define_option(ARROW_GRPC_USE_SHARED "Rely on gRPC shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + define_option(ARROW_JEMALLOC_USE_SHARED + "Rely on jemalloc shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + if(MSVC) + # LLVM doesn't support shared library with MSVC. 
+ set(ARROW_LLVM_USE_SHARED_DEFAULT OFF) + else() + set(ARROW_LLVM_USE_SHARED_DEFAULT ${ARROW_DEPENDENCY_USE_SHARED}) + endif() + define_option(ARROW_LLVM_USE_SHARED "Rely on LLVM shared libraries where relevant" + ${ARROW_LLVM_USE_SHARED_DEFAULT}) + + define_option(ARROW_LZ4_USE_SHARED "Rely on lz4 shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + define_option(ARROW_OPENSSL_USE_SHARED + "Rely on OpenSSL shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + define_option(ARROW_PROTOBUF_USE_SHARED + "Rely on Protocol Buffers shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + define_option(ARROW_SNAPPY_USE_SHARED "Rely on snappy shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + if(WIN32) + # It seems that Thrift doesn't support DLL well yet. + # MSYS2, conda-forge and vcpkg don't build shared library. + set(ARROW_THRIFT_USE_SHARED_DEFAULT OFF) + else() + set(ARROW_THRIFT_USE_SHARED_DEFAULT ${ARROW_DEPENDENCY_USE_SHARED}) + endif() + define_option(ARROW_THRIFT_USE_SHARED "Rely on thrift shared libraries where relevant" + ${ARROW_THRIFT_USE_SHARED_DEFAULT}) + + define_option(ARROW_UTF8PROC_USE_SHARED + "Rely on utf8proc shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + define_option(ARROW_ZSTD_USE_SHARED "Rely on zstd shared libraries where relevant" + ${ARROW_DEPENDENCY_USE_SHARED}) + + define_option(ARROW_USE_GLOG "Build libraries with glog support for pluggable logging" + OFF) + + define_option(ARROW_WITH_BACKTRACE "Build with backtrace support" ON) + + define_option(ARROW_WITH_OPENTELEMETRY + "Build libraries with OpenTelemetry support for distributed tracing" OFF) + + define_option(ARROW_WITH_BROTLI "Build with Brotli compression" OFF) + define_option(ARROW_WITH_BZ2 "Build with BZ2 compression" OFF) + define_option(ARROW_WITH_LZ4 "Build with lz4 compression" OFF) + define_option(ARROW_WITH_SNAPPY "Build with Snappy compression" OFF) + define_option(ARROW_WITH_ZLIB 
"Build with zlib compression" OFF) + define_option(ARROW_WITH_ZSTD "Build with zstd compression" OFF) + + define_option(ARROW_WITH_UTF8PROC + "Build with support for Unicode properties using the utf8proc library;(only used if ARROW_COMPUTE is ON or ARROW_GANDIVA is ON)" + ON) + define_option(ARROW_WITH_RE2 + "Build with support for regular expressions using the re2 library;(only used if ARROW_COMPUTE or ARROW_GANDIVA is ON)" + ON) + + #---------------------------------------------------------------------- + if(MSVC_TOOLCHAIN) + set_option_category("MSVC") + + define_option(MSVC_LINK_VERBOSE + "Pass verbose linking options when linking libraries and executables" + OFF) + + define_option_string(BROTLI_MSVC_STATIC_LIB_SUFFIX + "Brotli static lib suffix used on Windows with MSVC" "-static") + + define_option_string(PROTOBUF_MSVC_STATIC_LIB_SUFFIX + "Protobuf static lib suffix used on Windows with MSVC" "") + + define_option_string(RE2_MSVC_STATIC_LIB_SUFFIX + "re2 static lib suffix used on Windows with MSVC" "_static") + + if(DEFINED ENV{CONDA_PREFIX}) + # Conda package changes the output name. 
+ # https://github.com/conda-forge/snappy-feedstock/blob/main/recipe/windows-static-lib-name.patch + set(SNAPPY_MSVC_STATIC_LIB_SUFFIX_DEFAULT "_static") + else() + set(SNAPPY_MSVC_STATIC_LIB_SUFFIX_DEFAULT "") + endif() + define_option_string(SNAPPY_MSVC_STATIC_LIB_SUFFIX + "Snappy static lib suffix used on Windows with MSVC" + "${SNAPPY_MSVC_STATIC_LIB_SUFFIX_DEFAULT}") + + define_option_string(LZ4_MSVC_STATIC_LIB_SUFFIX + "Lz4 static lib suffix used on Windows with MSVC" "_static") + + define_option_string(ZSTD_MSVC_STATIC_LIB_SUFFIX + "ZStd static lib suffix used on Windows with MSVC" "_static") + + define_option(ARROW_USE_STATIC_CRT "Build Arrow with statically linked CRT" OFF) + endif() + + #---------------------------------------------------------------------- + set_option_category("Parquet") + + define_option(PARQUET_BUILD_EXECUTABLES + "Build the Parquet executable CLI tools. Requires static libraries to be built." + OFF) + + define_option(PARQUET_BUILD_EXAMPLES + "Build the Parquet examples. Requires static libraries to be built." OFF) + + define_option(PARQUET_REQUIRE_ENCRYPTION + "Build support for encryption. 
Fail if OpenSSL is not found" + OFF + DEPENDS + ARROW_FILESYSTEM) + + #---------------------------------------------------------------------- + set_option_category("Gandiva") + + # ARROW-3860: Temporary workaround + define_option(ARROW_GANDIVA_STATIC_LIBSTDCPP + "Include -static-libstdc++ -static-libgcc when linking with;Gandiva static libraries" + OFF) + + define_option_string(ARROW_GANDIVA_PC_CXX_FLAGS + "Compiler flags to append when pre-compiling Gandiva operations" + "") + + #---------------------------------------------------------------------- + set_option_category("Cross compiling") + + define_option_string(ARROW_GRPC_CPP_PLUGIN "grpc_cpp_plugin path to be used" "") + + #---------------------------------------------------------------------- + set_option_category("Advanced developer") + + define_option(ARROW_EXTRA_ERROR_CONTEXT + "Compile with extra error context (line numbers, code)" OFF) + + define_option(ARROW_OPTIONAL_INSTALL + "If enabled install ONLY targets that have already been built. Please be;\ +advised that if this is enabled 'install' will fail silently on components;\ +that have not been built" + OFF) + + define_option_string(ARROW_GDB_INSTALL_DIR + "Use a custom install directory for GDB plugin.;\ +In general, you don't need to specify this because the default;\ +(CMAKE_INSTALL_FULL_BINDIR on Windows, CMAKE_INSTALL_FULL_LIBDIR otherwise);\ +is reasonable." + "") + + option(ARROW_BUILD_CONFIG_SUMMARY_JSON "Summarize build configuration in a JSON file" + ON) + + resolve_option_dependencies() +endif() + +macro(validate_config) + foreach(category ${ARROW_OPTION_CATEGORIES}) + set(option_names ${ARROW_${category}_OPTION_NAMES}) + + foreach(name ${option_names}) + set(possible_values ${${name}_OPTION_POSSIBLE_VALUES}) + set(value "${${name}}") + if(possible_values) + if(NOT "${value}" IN_LIST possible_values) + message(FATAL_ERROR "Configuration option ${name} got invalid value '${value}'. 
" + "Allowed values: ${${name}_OPTION_ENUM}.") + endif() + endif() + endforeach() + + endforeach() +endmacro() + +macro(config_summary_message) + message(STATUS "---------------------------------------------------------------------") + message(STATUS "Arrow version: ${ARROW_VERSION}") + message(STATUS) + message(STATUS "Build configuration summary:") + + message(STATUS " Generator: ${CMAKE_GENERATOR}") + message(STATUS " Build type: ${CMAKE_BUILD_TYPE}") + message(STATUS " Source directory: ${CMAKE_CURRENT_SOURCE_DIR}") + message(STATUS " Install prefix: ${CMAKE_INSTALL_PREFIX}") + if(${CMAKE_EXPORT_COMPILE_COMMANDS}) + message(STATUS " Compile commands: ${CMAKE_CURRENT_BINARY_DIR}/compile_commands.json" + ) + endif() + + foreach(category ${ARROW_OPTION_CATEGORIES}) + + message(STATUS) + message(STATUS "${category} options:") + message(STATUS) + + set(option_names ${ARROW_${category}_OPTION_NAMES}) + + foreach(name ${option_names}) + set(value "${${name}}") + if("${value}" STREQUAL "") + set(value "\"\"") + endif() + + set(description ${${name}_OPTION_DESCRIPTION}) + + if(NOT ("${${name}_OPTION_ENUM}" STREQUAL "")) + set(summary "=${value} [default=${${name}_OPTION_ENUM}]") + else() + set(summary "=${value} [default=${${name}_OPTION_DEFAULT}]") + endif() + + message(STATUS " ${name}${summary}") + foreach(description_line ${description}) + message(STATUS " ${description_line}") + endforeach() + endforeach() + + endforeach() + + if(ARROW_PYTHON) + message(WARNING "ARROW_PYTHON is deprecated. 
Use CMake presets instead.") + endif() +endmacro() + +macro(config_summary_json) + set(summary "${CMAKE_CURRENT_BINARY_DIR}/cmake_summary.json") + message(STATUS " Outputting build configuration summary to ${summary}") + file(WRITE ${summary} "{\n") + + foreach(category ${ARROW_OPTION_CATEGORIES}) + foreach(name ${ARROW_${category}_OPTION_NAMES}) + file(APPEND ${summary} "\"${name}\": \"${${name}}\",\n") + endforeach() + endforeach() + + file(APPEND ${summary} "\"generator\": \"${CMAKE_GENERATOR}\",\n") + file(APPEND ${summary} "\"build_type\": \"${CMAKE_BUILD_TYPE}\",\n") + file(APPEND ${summary} "\"source_dir\": \"${CMAKE_CURRENT_SOURCE_DIR}\",\n") + if(${CMAKE_EXPORT_COMPILE_COMMANDS}) + file(APPEND ${summary} "\"compile_commands\": " + "\"${CMAKE_CURRENT_BINARY_DIR}/compile_commands.json\",\n") + endif() + file(APPEND ${summary} "\"install_prefix\": \"${CMAKE_INSTALL_PREFIX}\",\n") + file(APPEND ${summary} "\"arrow_version\": \"${ARROW_VERSION}\"\n") + file(APPEND ${summary} "}\n") +endmacro() + +macro(config_summary_cmake_setters path) + file(WRITE ${path} "# Options used to build arrow:") + + foreach(category ${ARROW_OPTION_CATEGORIES}) + file(APPEND ${path} "\n\n## ${category} options:") + foreach(name ${ARROW_${category}_OPTION_NAMES}) + set(description ${${name}_OPTION_DESCRIPTION}) + foreach(description_line ${description}) + file(APPEND ${path} "\n### ${description_line}") + endforeach() + file(APPEND ${path} "\nset(${name} \"${${name}}\")") + endforeach() + endforeach() + +endmacro() + +#---------------------------------------------------------------------- +# Compute default values for omitted variables + +if(NOT ARROW_GIT_ID) + execute_process(COMMAND "git" "log" "-n1" "--format=%H" + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + OUTPUT_VARIABLE ARROW_GIT_ID + OUTPUT_STRIP_TRAILING_WHITESPACE) +endif() +if(NOT ARROW_GIT_DESCRIPTION) + execute_process(COMMAND "git" "describe" "--tags" + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + ERROR_QUIET + OUTPUT_VARIABLE 
ARROW_GIT_DESCRIPTION + OUTPUT_STRIP_TRAILING_WHITESPACE) +endif() diff --git a/python/cmake_modules/FindAWSSDKAlt.cmake b/python/cmake_modules/FindAWSSDKAlt.cmake new file mode 100644 index 000000000000..611184aa1d17 --- /dev/null +++ b/python/cmake_modules/FindAWSSDKAlt.cmake @@ -0,0 +1,50 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +set(find_package_args) +if(AWSSDKAlt_FIND_VERSION) + list(APPEND find_package_args ${AWSSDKAlt_FIND_VERSION}) +endif() +if(AWSSDKAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +# See https://aws.amazon.com/blogs/developer/developer-experience-of-the-aws-sdk-for-c-now-simplified-by-cmake/ +# Workaround to force AWS CMake configuration to look for shared libraries +if(DEFINED ENV{CONDA_PREFIX}) + if(DEFINED BUILD_SHARED_LIBS) + set(BUILD_SHARED_LIBS_WAS_SET TRUE) + set(BUILD_SHARED_LIBS_KEEP ${BUILD_SHARED_LIBS}) + else() + set(BUILD_SHARED_LIBS_WAS_SET FALSE) + endif() + set(BUILD_SHARED_LIBS ON) +endif() +find_package(AWSSDK ${find_package_args} + COMPONENTS config + s3 + transfer + identity-management + sts) +# Restore previous value of BUILD_SHARED_LIBS +if(DEFINED ENV{CONDA_PREFIX}) + if(BUILD_SHARED_LIBS_WAS_SET) + set(BUILD_SHARED_LIBS ${BUILD_SHARED_LIBS_KEEP}) + else() + unset(BUILD_SHARED_LIBS) + endif() +endif() +set(AWSSDKAlt_FOUND ${AWSSDK_FOUND}) diff --git a/python/cmake_modules/FindAzure.cmake b/python/cmake_modules/FindAzure.cmake new file mode 100644 index 000000000000..fdf354b724e7 --- /dev/null +++ b/python/cmake_modules/FindAzure.cmake @@ -0,0 +1,45 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +if(Azure_FOUND) + return() +endif() + +set(find_package_args) +list(APPEND find_package_args CONFIG) +if(Azure_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() + +if(Azure_FIND_REQUIRED) + list(APPEND find_package_args REQUIRED) +endif() + +find_package(azure-core-cpp ${find_package_args}) +find_package(azure-identity-cpp ${find_package_args}) +find_package(azure-storage-blobs-cpp ${find_package_args}) +find_package(azure-storage-common-cpp ${find_package_args}) +find_package(azure-storage-files-datalake-cpp ${find_package_args}) + +find_package_handle_standard_args( + Azure + REQUIRED_VARS azure-core-cpp_FOUND + azure-identity-cpp_FOUND + azure-storage-blobs-cpp_FOUND + azure-storage-common-cpp_FOUND + azure-storage-files-datalake-cpp_FOUND + VERSION_VAR azure-core-cpp_VERSION) diff --git a/python/cmake_modules/FindBrotliAlt.cmake b/python/cmake_modules/FindBrotliAlt.cmake new file mode 100644 index 000000000000..aa7495664861 --- /dev/null +++ b/python/cmake_modules/FindBrotliAlt.cmake @@ -0,0 +1,165 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Tries to find Brotli headers and libraries. 
+# +# Usage of this module as follows: +# +# find_package(BrotliAlt) + +if(BrotliAlt_FOUND) + return() +endif() + +if(ARROW_VCPKG OR ARROW_PACKAGE_KIND STREQUAL "conan") + set(find_package_args "") + if(BrotliAlt_FIND_VERSION) + list(APPEND find_package_args ${BrotliAlt_FIND_VERSION}) + endif() + if(BrotliAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) + endif() + if(BrotliAlt_FIND_REQUIRED) + list(APPEND find_package_args REQUIRED) + endif() + if(ARROW_VCPKG) + find_package(BrotliAlt NAMES unofficial-brotli ${find_package_args}) + else() + find_package(BrotliAlt NAMES brotli ${find_package_args}) + endif() + set(Brotli_FOUND ${BrotliAlt_FOUND}) + if(BrotliAlt_FOUND) + if(ARROW_VCPKG) + add_library(Brotli::brotlicommon ALIAS unofficial::brotli::brotlicommon) + add_library(Brotli::brotlienc ALIAS unofficial::brotli::brotlienc) + add_library(Brotli::brotlidec ALIAS unofficial::brotli::brotlidec) + else() + add_library(Brotli::brotlicommon ALIAS brotli::brotlicommon) + add_library(Brotli::brotlienc ALIAS brotli::brotlienc) + add_library(Brotli::brotlidec ALIAS brotli::brotlidec) + endif() + return() + endif() +endif() + +if(ARROW_BROTLI_USE_SHARED) + set(BROTLI_COMMON_LIB_NAMES + brotlicommon + ${CMAKE_SHARED_LIBRARY_PREFIX}brotlicommon${CMAKE_SHARED_LIBRARY_SUFFIX}) + + set(BROTLI_ENC_LIB_NAMES + brotlienc ${CMAKE_SHARED_LIBRARY_PREFIX}brotlienc${CMAKE_SHARED_LIBRARY_SUFFIX}) + + set(BROTLI_DEC_LIB_NAMES + brotlidec ${CMAKE_SHARED_LIBRARY_PREFIX}brotlidec${CMAKE_SHARED_LIBRARY_SUFFIX}) +else() + set(BROTLI_COMMON_LIB_NAMES + brotlicommon-static + ${CMAKE_STATIC_LIBRARY_PREFIX}brotlicommon-static${CMAKE_STATIC_LIBRARY_SUFFIX} + ${CMAKE_STATIC_LIBRARY_PREFIX}brotlicommon_static${CMAKE_STATIC_LIBRARY_SUFFIX} + ${CMAKE_STATIC_LIBRARY_PREFIX}brotlicommon${CMAKE_STATIC_LIBRARY_SUFFIX}) + + set(BROTLI_ENC_LIB_NAMES + brotlienc-static + ${CMAKE_STATIC_LIBRARY_PREFIX}brotlienc-static${CMAKE_STATIC_LIBRARY_SUFFIX} + 
${CMAKE_STATIC_LIBRARY_PREFIX}brotlienc_static${CMAKE_STATIC_LIBRARY_SUFFIX} + ${CMAKE_STATIC_LIBRARY_PREFIX}brotlienc${CMAKE_STATIC_LIBRARY_SUFFIX}) + + set(BROTLI_DEC_LIB_NAMES + brotlidec-static + ${CMAKE_STATIC_LIBRARY_PREFIX}brotlidec-static${CMAKE_STATIC_LIBRARY_SUFFIX} + ${CMAKE_STATIC_LIBRARY_PREFIX}brotlidec_static${CMAKE_STATIC_LIBRARY_SUFFIX} + ${CMAKE_STATIC_LIBRARY_PREFIX}brotlidec${CMAKE_STATIC_LIBRARY_SUFFIX}) +endif() + +if(BROTLI_ROOT) + find_library(BROTLI_COMMON_LIBRARY + NAMES ${BROTLI_COMMON_LIB_NAMES} + PATHS ${BROTLI_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_library(BROTLI_ENC_LIBRARY + NAMES ${BROTLI_ENC_LIB_NAMES} + PATHS ${BROTLI_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_library(BROTLI_DEC_LIBRARY + NAMES ${BROTLI_DEC_LIB_NAMES} + PATHS ${BROTLI_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_path(BROTLI_INCLUDE_DIR + NAMES brotli/decode.h + PATHS ${BROTLI_ROOT} + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES} + NO_DEFAULT_PATH) +else() + find_package(PkgConfig QUIET) + pkg_check_modules(BROTLI_PC libbrotlicommon libbrotlienc libbrotlidec) + if(BROTLI_PC_FOUND) + set(BROTLI_INCLUDE_DIR "${BROTLI_PC_libbrotlicommon_INCLUDEDIR}") + + # Some systems (e.g. Fedora) don't fill Brotli_LIBRARY_DIRS, so add the other dirs here. 
+ list(APPEND BROTLI_PC_LIBRARY_DIRS "${BROTLI_PC_libbrotlicommon_LIBDIR}") + list(APPEND BROTLI_PC_LIBRARY_DIRS "${BROTLI_PC_libbrotlienc_LIBDIR}") + list(APPEND BROTLI_PC_LIBRARY_DIRS "${BROTLI_PC_libbrotlidec_LIBDIR}") + + find_library(BROTLI_COMMON_LIBRARY + NAMES ${BROTLI_COMMON_LIB_NAMES} + PATHS ${BROTLI_PC_LIBRARY_DIRS} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_library(BROTLI_ENC_LIBRARY + NAMES ${BROTLI_ENC_LIB_NAMES} + PATHS ${BROTLI_PC_LIBRARY_DIRS} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_library(BROTLI_DEC_LIBRARY + NAMES ${BROTLI_DEC_LIB_NAMES} + PATHS ${BROTLI_PC_LIBRARY_DIRS} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + else() + find_library(BROTLI_COMMON_LIBRARY + NAMES ${BROTLI_COMMON_LIB_NAMES} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_library(BROTLI_ENC_LIBRARY + NAMES ${BROTLI_ENC_LIB_NAMES} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_library(BROTLI_DEC_LIBRARY + NAMES ${BROTLI_DEC_LIB_NAMES} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_path(BROTLI_INCLUDE_DIR + NAMES brotli/decode.h + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) + endif() +endif() + +find_package_handle_standard_args( + BrotliAlt REQUIRED_VARS BROTLI_COMMON_LIBRARY BROTLI_ENC_LIBRARY BROTLI_DEC_LIBRARY + BROTLI_INCLUDE_DIR) +set(Brotli_FOUND ${BrotliAlt_FOUND}) +if(BrotliAlt_FOUND) + add_library(Brotli::brotlicommon UNKNOWN IMPORTED) + set_target_properties(Brotli::brotlicommon + PROPERTIES IMPORTED_LOCATION "${BROTLI_COMMON_LIBRARY}" + INTERFACE_INCLUDE_DIRECTORIES "${BROTLI_INCLUDE_DIR}") + add_library(Brotli::brotlienc UNKNOWN IMPORTED) + set_target_properties(Brotli::brotlienc + PROPERTIES IMPORTED_LOCATION "${BROTLI_ENC_LIBRARY}" + INTERFACE_INCLUDE_DIRECTORIES "${BROTLI_INCLUDE_DIR}") + add_library(Brotli::brotlidec UNKNOWN IMPORTED) + set_target_properties(Brotli::brotlidec + PROPERTIES IMPORTED_LOCATION "${BROTLI_DEC_LIBRARY}" + 
INTERFACE_INCLUDE_DIRECTORIES "${BROTLI_INCLUDE_DIR}") +endif() diff --git a/python/cmake_modules/FindClangTools.cmake b/python/cmake_modules/FindClangTools.cmake new file mode 100644 index 000000000000..1364ccbed816 --- /dev/null +++ b/python/cmake_modules/FindClangTools.cmake @@ -0,0 +1,122 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Tries to find the clang-tidy and clang-format modules +# +# Usage of this module as follows: +# +# find_package(ClangTools) +# +# Variables used by this module which can change the default behaviour and need +# to be set before calling find_package: +# +# CLANG_FORMAT_VERSION - +# The version of clang-format to find. If this is not specified, clang-format +# will not be searched for. 
+# +# ClangTools_PATH - +# When set, this path is inspected in addition to standard library binary locations +# to find clang-tidy and clang-format +# +# This module defines +# CLANG_TIDY_BIN, The path to the clang tidy binary +# CLANG_TIDY_FOUND, Whether clang tidy was found +# CLANG_FORMAT_BIN, The path to the clang format binary +# CLANG_FORMAT_FOUND, Whether clang format was found + +set(CLANG_TOOLS_SEARCH_PATHS + ${ClangTools_PATH} + $ENV{CLANG_TOOLS_PATH} + /usr/local/bin + /usr/bin + "C:/Program Files/LLVM/bin" # Windows, non-conda + "$ENV{CONDA_PREFIX}/Library/bin" # Windows, conda + "$ENV{CONDA_PREFIX}/bin") # Unix, conda +if(APPLE) + find_program(BREW brew) + if(BREW) + execute_process(COMMAND ${BREW} --prefix "llvm@${ARROW_CLANG_TOOLS_VERSION_MAJOR}" + OUTPUT_VARIABLE CLANG_TOOLS_BREW_PREFIX + OUTPUT_STRIP_TRAILING_WHITESPACE) + if(NOT CLANG_TOOLS_BREW_PREFIX) + execute_process(COMMAND ${BREW} --prefix llvm + OUTPUT_VARIABLE CLANG_TOOLS_BREW_PREFIX + OUTPUT_STRIP_TRAILING_WHITESPACE) + endif() + if(CLANG_TOOLS_BREW_PREFIX) + list(APPEND CLANG_TOOLS_SEARCH_PATHS "${CLANG_TOOLS_BREW_PREFIX}/bin") + endif() + endif() +endif() + +function(FIND_CLANG_TOOL NAME OUTPUT VERSION_CHECK_PATTERN) + unset(CLANG_TOOL_BIN CACHE) + find_program(CLANG_TOOL_BIN + NAMES ${NAME}-${ARROW_CLANG_TOOLS_VERSION} + ${NAME}-${ARROW_CLANG_TOOLS_VERSION_MAJOR} + PATHS ${CLANG_TOOLS_SEARCH_PATHS} + NO_DEFAULT_PATH) + if(NOT CLANG_TOOL_BIN) + # try searching for non-versioned tool and check the version + find_program(CLANG_TOOL_BIN + NAMES ${NAME} + PATHS ${CLANG_TOOLS_SEARCH_PATHS} + NO_DEFAULT_PATH) + if(CLANG_TOOL_BIN) + unset(CLANG_TOOL_VERSION_MESSAGE) + execute_process(COMMAND ${CLANG_TOOL_BIN} "-version" + OUTPUT_VARIABLE CLANG_TOOL_VERSION_MESSAGE + OUTPUT_STRIP_TRAILING_WHITESPACE) + if(NOT (${CLANG_TOOL_VERSION_MESSAGE} MATCHES ${VERSION_CHECK_PATTERN})) + message(STATUS "${NAME} found, but version did not match \"${VERSION_CHECK_PATTERN}\"" + ) + set(CLANG_TOOL_BIN 
"CLANG_TOOL_BIN-NOTFOUND") + endif() + endif() + endif() + if(CLANG_TOOL_BIN) + set(${OUTPUT} + ${CLANG_TOOL_BIN} + PARENT_SCOPE) + else() + set(${OUTPUT} + "${OUTPUT}-NOTFOUND" + PARENT_SCOPE) + endif() +endfunction() + +string(REGEX REPLACE "\\." "\\\\." ARROW_CLANG_TOOLS_VERSION_ESCAPED + "${ARROW_CLANG_TOOLS_VERSION}") + +find_clang_tool(clang-tidy CLANG_TIDY_BIN + "LLVM version ${ARROW_CLANG_TOOLS_VERSION_ESCAPED}") +if(CLANG_TIDY_BIN) + set(CLANG_TIDY_FOUND 1) + message(STATUS "clang-tidy found at ${CLANG_TIDY_BIN}") +else() + set(CLANG_TIDY_FOUND 0) + message(STATUS "clang-tidy ${ARROW_CLANG_TOOLS_VERSION} not found") +endif() + +find_clang_tool(clang-format CLANG_FORMAT_BIN + "clang-format version ${ARROW_CLANG_TOOLS_VERSION_ESCAPED}") +if(CLANG_FORMAT_BIN) + set(CLANG_FORMAT_FOUND 1) + message(STATUS "clang-format found at ${CLANG_FORMAT_BIN}") +else() + set(CLANG_FORMAT_FOUND 0) + message(STATUS "clang-format ${ARROW_CLANG_TOOLS_VERSION} not found") +endif() + +find_package_handle_standard_args(ClangTools REQUIRED_VARS CLANG_FORMAT_BIN + CLANG_TIDY_BIN) diff --git a/python/cmake_modules/FindGTestAlt.cmake b/python/cmake_modules/FindGTestAlt.cmake new file mode 100644 index 000000000000..d1873d138e6c --- /dev/null +++ b/python/cmake_modules/FindGTestAlt.cmake @@ -0,0 +1,69 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +if(GTestAlt_FOUND) + return() +endif() + +set(find_package_args) +if(GTestAlt_FIND_VERSION) + list(APPEND find_package_args ${GTestAlt_FIND_VERSION}) +endif() +if(GTestAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +if(CMAKE_VERSION VERSION_LESS 3.23) + list(APPEND find_package_args CONFIG) +endif() +# We can't find shred library version of GoogleTest on Windows with +# Conda's gtest package because it doesn't provide GTestConfig.cmake +# provided by GoogleTest and CMake's built-in FindGTtest.cmake +# doesn't support gtest_dll.dll. +find_package(GTest ${find_package_args}) + +set(GTestAlt_FOUND ${GTest_FOUND}) +if(GTestAlt_FOUND AND GTestAlt_NEED_CXX_STANDARD_CHECK) + set(KEEP_CMAKE_TRY_COMPILE_TARGET_TYPE ${CMAKE_TRY_COMPILE_TARGET_TYPE}) + set(CMAKE_TRY_COMPILE_TARGET_TYPE EXECUTABLE) + set(GTestAlt_CXX_STANDARD_TEST_SOURCE + "${CMAKE_CURRENT_BINARY_DIR}/gtest_cxx_standard_test.cc") + file(WRITE ${GTestAlt_CXX_STANDARD_TEST_SOURCE} + " +#include +#include + +TEST(CXX_STANDARD, MatcherStringView) { + testing::Matcher matcher(std::string_view(\"hello\")); +} + ") + try_compile(GTestAlt_CXX_STANDARD_AVAILABLE ${CMAKE_CURRENT_BINARY_DIR} + SOURCES ${GTestAlt_CXX_STANDARD_TEST_SOURCE} + CMAKE_FLAGS "-DCMAKE_CXX_STANDARD=${CMAKE_CXX_STANDARD}" + LINK_LIBRARIES GTest::gtest_main + OUTPUT_VARIABLE GTestAlt_CXX_STANDARD_OUTPUT) + set(CMAKE_TRY_COMPILE_TARGET_TYPE ${KEEP_CMAKE_TRY_COMPILE_TARGET_TYPE}) + if(NOT GTestAlt_CXX_STANDARD_AVAILABLE) + message(STATUS "GTest can't be used with C++${CMAKE_CXX_STANDARD}.") + message(STATUS "Use -DGTest_SOURCE=BUNDLED.") + message(STATUS "Output:\n${GTestAlt_CXX_STANDARD_OUTPUT}") + find_package_handle_standard_args(GTestAlt + REQUIRED_VARS GTestAlt_CXX_STANDARD_AVAILABLE) + endif() + + target_link_libraries(GTest::gmock INTERFACE GTest::gtest) + target_link_libraries(GTest::gtest_main INTERFACE GTest::gtest) 
+endif() diff --git a/python/cmake_modules/FindInferTools.cmake b/python/cmake_modules/FindInferTools.cmake new file mode 100644 index 000000000000..c4b65653ae9a --- /dev/null +++ b/python/cmake_modules/FindInferTools.cmake @@ -0,0 +1,47 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Tries to find the infer module +# +# Usage of this module as follows: +# +# find_package(InferTools) +# +# Variables used by this module, they can change the default behaviour and need +# to be set before calling find_package: +# +# InferTools_PATH - +# When set, this path is inspected instead of standard library binary locations +# to find infer +# +# This module defines +# INFER_BIN, The path to the infer binary +# INFER_FOUND, Whether infer was found + +find_program(INFER_BIN + NAMES infer + PATHS ${InferTools_PATH} + $ENV{INFER_TOOLS_PATH} + /usr/local/bin + /usr/bin + /usr/local/homebrew/bin + /opt/local/bin + NO_DEFAULT_PATH) + +if("${INFER_BIN}" STREQUAL "INFER_BIN-NOTFOUND") + set(INFER_FOUND 0) + message(STATUS "infer not found") +else() + set(INFER_FOUND 1) + message(STATUS "infer found at ${INFER_BIN}") +endif() diff --git a/python/cmake_modules/FindLLVMAlt.cmake b/python/cmake_modules/FindLLVMAlt.cmake new file mode 100644 index 000000000000..2730f829817f --- /dev/null +++ b/python/cmake_modules/FindLLVMAlt.cmake @@ -0,0 +1,133 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# Usage of this module as follows: +# +# find_package(LLVMAlt) + +if(LLVMAlt_FOUND) + return() +endif() + +if(DEFINED LLVM_ROOT) + # if llvm source is set to conda then prefer conda llvm over system llvm even + # if the system one is newer + foreach(ARROW_LLVM_VERSION ${ARROW_LLVM_VERSIONS}) + find_package(LLVM + ${ARROW_LLVM_VERSION} + CONFIG + NO_DEFAULT_PATH + HINTS + ${LLVM_ROOT}) + if(LLVM_FOUND) + break() + endif() + endforeach() +endif() + +if(NOT LLVM_FOUND) + foreach(ARROW_LLVM_VERSION ${ARROW_LLVM_VERSIONS}) + set(LLVM_HINTS ${LLVM_ROOT} ${LLVM_DIR} /usr/lib /usr/share) + + if(APPLE) + find_program(BREW brew) + if(BREW) + string(REGEX REPLACE "^([0-9]+)(\\..+)?" 
"\\1" ARROW_LLVM_VERSION_MAJOR + "${ARROW_LLVM_VERSION}") + execute_process(COMMAND ${BREW} --prefix "llvm@${ARROW_LLVM_VERSION_MAJOR}" + OUTPUT_VARIABLE LLVM_BREW_PREFIX + OUTPUT_STRIP_TRAILING_WHITESPACE) + list(APPEND LLVM_HINTS ${LLVM_BREW_PREFIX}) + endif() + endif() + + find_package(LLVM + ${ARROW_LLVM_VERSION} + CONFIG + HINTS + ${LLVM_HINTS}) + + if(LLVM_FOUND) + break() + endif() + endforeach() +endif() + +if(LLVM_FOUND) + find_program(LLVM_LINK_EXECUTABLE llvm-link HINTS ${LLVM_TOOLS_BINARY_DIR}) + + find_program(CLANG_EXECUTABLE + NAMES clang-${LLVM_PACKAGE_VERSION} + clang-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR} + clang-${LLVM_VERSION_MAJOR} clang + HINTS ${LLVM_TOOLS_BINARY_DIR}) + + add_library(LLVM::LLVM_HEADERS INTERFACE IMPORTED) + set_target_properties(LLVM::LLVM_HEADERS + PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${LLVM_INCLUDE_DIRS}" + INTERFACE_COMPILE_FLAGS "${LLVM_DEFINITIONS}") + + add_library(LLVM::LLVM_LIBS INTERFACE IMPORTED) + if(ARROW_LLVM_USE_SHARED) + target_link_libraries(LLVM::LLVM_LIBS INTERFACE LLVM) + else() + # Find the libraries that correspond to the LLVM components + set(LLVM_TARGET_COMPONENTS + analysis + bitreader + core + debuginfodwarf + ipo + linker + native + orcjit + target) + if(LLVM_VERSION_MAJOR GREATER_EQUAL 14) + list(APPEND LLVM_TARGET_COMPONENTS passes) + endif() + llvm_map_components_to_libnames(LLVM_LIBS ${LLVM_TARGET_COMPONENTS}) + target_link_libraries(LLVM::LLVM_LIBS INTERFACE ${LLVM_LIBS}) + + if(TARGET LLVMSupport AND NOT ARROW_ZSTD_USE_SHARED) + get_target_property(LLVM_SUPPORT_INTERFACE_LINK_LIBRARIES LLVMSupport + INTERFACE_LINK_LIBRARIES) + list(FIND LLVM_SUPPORT_INTERFACE_LINK_LIBRARIES zstd::libzstd_shared + LLVM_SUPPORT_LIBZSTD_INDEX) + if(NOT LLVM_SUPPORT_LIBZSTD_INDEX EQUAL -1) + list(REMOVE_AT LLVM_SUPPORT_INTERFACE_LINK_LIBRARIES + ${LLVM_SUPPORT_LIBZSTD_INDEX}) + list(INSERT LLVM_SUPPORT_INTERFACE_LINK_LIBRARIES ${LLVM_SUPPORT_LIBZSTD_INDEX} + zstd::libzstd_static) + endif() + 
set_target_properties(LLVMSupport + PROPERTIES INTERFACE_LINK_LIBRARIES + "${LLVM_SUPPORT_INTERFACE_LINK_LIBRARIES}") + endif() + endif() +endif() + +mark_as_advanced(CLANG_EXECUTABLE LLVM_LINK_EXECUTABLE) + +find_package_handle_standard_args( + LLVMAlt + REQUIRED_VARS # The first variable is used for display. + LLVM_PACKAGE_VERSION CLANG_EXECUTABLE LLVM_FOUND LLVM_LINK_EXECUTABLE) +if(LLVMAlt_FOUND) + message(STATUS "Using LLVMConfig.cmake in: ${LLVM_DIR}") + message(STATUS "Found llvm-link ${LLVM_LINK_EXECUTABLE}") + message(STATUS "Found clang ${CLANG_EXECUTABLE}") +endif() diff --git a/python/cmake_modules/FindOpenSSLAlt.cmake b/python/cmake_modules/FindOpenSSLAlt.cmake new file mode 100644 index 000000000000..03c61e6ea2ad --- /dev/null +++ b/python/cmake_modules/FindOpenSSLAlt.cmake @@ -0,0 +1,55 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +if(OpenSSLAlt_FOUND) + return() +endif() + +if(APPLE AND NOT OPENSSL_ROOT_DIR) + find_program(BREW brew) + if(BREW) + foreach(BREW_OPENSSL_VERSION "" "3" "3.0" "1.1") + set(BREW_OPENSSL_PACKAGE "openssl") + if(BREW_OPENSSL_VERSION) + string(APPEND BREW_OPENSSL_PACKAGE "@${BREW_OPENSSL_VERSION}") + endif() + execute_process(COMMAND ${BREW} --prefix --installed ${BREW_OPENSSL_PACKAGE} + OUTPUT_VARIABLE BREW_OPENSSL_PREFIX + OUTPUT_STRIP_TRAILING_WHITESPACE) + if(BREW_OPENSSL_PREFIX) + set(OPENSSL_ROOT_DIR ${BREW_OPENSSL_PREFIX}) + break() + endif() + endforeach() + endif() +endif() + +set(find_package_args) +if(OpenSSLAlt_FIND_VERSION) + list(APPEND find_package_args ${OpenSSLAlt_FIND_VERSION}) +endif() +if(OpenSSLAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +if(ARROW_OPENSSL_USE_SHARED) + set(OPENSSL_USE_STATIC_LIBS OFF) +else() + set(OPENSSL_USE_STATIC_LIBS ON) +endif() +find_package(OpenSSL ${find_package_args}) + +set(OpenSSLAlt_FOUND ${OPENSSL_FOUND}) diff --git a/python/cmake_modules/FindProtobufAlt.cmake b/python/cmake_modules/FindProtobufAlt.cmake new file mode 100644 index 000000000000..703e05c4731b --- /dev/null +++ b/python/cmake_modules/FindProtobufAlt.cmake @@ -0,0 +1,54 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +if(ARROW_PROTOBUF_USE_SHARED) + set(Protobuf_USE_STATIC_LIBS OFF) +else() + set(Protobuf_USE_STATIC_LIBS ON) +endif() + +set(find_package_args) +if(ProtobufAlt_FIND_VERSION) + list(APPEND find_package_args ${ProtobufAlt_FIND_VERSION}) +endif() +if(ProtobufAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(protobuf CONFIG ${find_package_args}) +set(ProtobufAlt_FOUND ${protobuf_FOUND}) +if(ProtobufAlt_FOUND) + if(Protobuf_PROTOC_EXECUTABLE) + # work around https://github.com/protocolbuffers/protobuf/issues/14576 + set_target_properties(protobuf::protoc PROPERTIES IMPORTED_LOCATION_RELEASE + "${Protobuf_PROTOC_EXECUTABLE}") + endif() + set(ProtobufAlt_VERSION ${protobuf_VERSION}) + set(ProtobufAlt_VERSION_MAJOR ${protobuf_VERSION_MAJOR}) + set(ProtobufAlt_VERSION_MINOR ${protobuf_VERSION_MINOR}) + set(ProtobufAlt_VERSION_PATCH ${protobuf_VERSION_PATCH}) + set(ProtobufAlt_VERSION_TWEEK ${protobuf_VERSION_TWEEK}) +else() + find_package(Protobuf ${find_package_args}) + set(ProtobufAlt_FOUND ${Protobuf_FOUND}) + if(ProtobufAlt_FOUND) + set(ProtobufAlt_VERSION ${Protobuf_VERSION}) + set(ProtobufAlt_VERSION_MAJOR ${Protobuf_VERSION_MAJOR}) + set(ProtobufAlt_VERSION_MINOR ${Protobuf_VERSION_MINOR}) + set(ProtobufAlt_VERSION_PATCH ${Protobuf_VERSION_PATCH}) + set(ProtobufAlt_VERSION_TWEEK ${Protobuf_VERSION_TWEEK}) + endif() +endif() diff --git a/python/cmake_modules/FindPython3Alt.cmake b/python/cmake_modules/FindPython3Alt.cmake new file mode 100644 index 000000000000..a057be8da631 --- /dev/null +++ b/python/cmake_modules/FindPython3Alt.cmake @@ -0,0 +1,89 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This module finds the libraries corresponding to the Python 3 interpreter +# and the NumPy package, and sets the following variables: +# - PYTHON_EXECUTABLE +# - PYTHON_INCLUDE_DIRS +# - PYTHON_LIBRARIES +# - PYTHON_OTHER_LIBS +# - NUMPY_INCLUDE_DIRS + +if(Python3Alt_FOUND) + return() +endif() + +set(Python3Alt_FIND_PACKAGE_OPTIONS) +set(Python3Alt_NumPy_FIND_PACKAGE_OPTIONS) +if(Python3Alt_FIND_VERSION) + list(APPEND Python3Alt_FIND_PACKAGE_OPTIONS ${Python3Alt_FIND_VERSION}) +endif() +if(Python3Alt_FIND_REQUIRED) + list(APPEND Python3Alt_FIND_PACKAGE_OPTIONS REQUIRED) + list(APPEND Python3Alt_NumPy_FIND_PACKAGE_OPTIONS REQUIRED) +endif() +if(Python3Alt_FIND_QUIETLY) + list(APPEND Python3Alt_FIND_PACKAGE_OPTIONS QUIET) + list(APPEND Python3Alt_NumPy_FIND_PACKAGE_OPTIONS QUIET) +endif() + +if(CMAKE_VERSION VERSION_LESS 3.18.0) + # We need libpython to be present, so ask for the full "Development" + # component on CMake < 3.18, where "Development.Module" is not + # available. 
+ find_package(Python3 ${Python3Alt_FIND_PACKAGE_OPTIONS} COMPONENTS Interpreter + Development NumPy) +else() + find_package(Python3 ${Python3Alt_FIND_PACKAGE_OPTIONS} + COMPONENTS Interpreter Development.Module NumPy) +endif() + +if(NOT Python3_FOUND) + return() +endif() + +set(PYTHON_EXECUTABLE ${Python3_EXECUTABLE}) +set(PYTHON_INCLUDE_DIRS ${Python3_INCLUDE_DIRS}) +set(PYTHON_LIBRARIES ${Python3_LIBRARIES}) +set(PYTHON_OTHER_LIBS) + +get_target_property(NUMPY_INCLUDE_DIRS Python3::NumPy INTERFACE_INCLUDE_DIRECTORIES) + +# CMake's python3_add_library() doesn't apply the required extension suffix, +# detect it ourselves. +# (https://gitlab.kitware.com/cmake/cmake/issues/20408) +execute_process(COMMAND "${PYTHON_EXECUTABLE}" "-c" + "import sysconfig; print(sysconfig.get_config_var('EXT_SUFFIX'))" + RESULT_VARIABLE _PYTHON_RESULT + OUTPUT_VARIABLE _PYTHON_STDOUT + ERROR_VARIABLE _PYTHON_STDERR) + +if(NOT _PYTHON_RESULT MATCHES 0) + if(Python3Alt_FIND_REQUIRED) + message(FATAL_ERROR "Python 3 config failure:\n${_PYTHON_STDERR}") + endif() +endif() + +string(STRIP ${_PYTHON_STDOUT} _EXT_SUFFIX) + +function(PYTHON_ADD_MODULE name) + python3_add_library(${name} MODULE ${ARGN}) + set_target_properties(${name} PROPERTIES SUFFIX ${_EXT_SUFFIX}) +endfunction() + +find_package_handle_standard_args( + Python3Alt REQUIRED_VARS PYTHON_EXECUTABLE PYTHON_INCLUDE_DIRS NUMPY_INCLUDE_DIRS) diff --git a/python/cmake_modules/FindRapidJSONAlt.cmake b/python/cmake_modules/FindRapidJSONAlt.cmake new file mode 100644 index 000000000000..babb450e204a --- /dev/null +++ b/python/cmake_modules/FindRapidJSONAlt.cmake @@ -0,0 +1,94 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +if(RapidJSONAlt_FOUND) + return() +endif() + +set(find_package_args) +if(RapidJSONAlt_FIND_VERSION) + list(APPEND find_package_args ${RapidJSONAlt_FIND_VERSION}) +endif() +if(RapidJSONAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(RapidJSON ${find_package_args}) +if(RapidJSON_FOUND) + set(RapidJSONAlt_FOUND TRUE) + if(NOT TARGET RapidJSON) + add_library(RapidJSON INTERFACE IMPORTED) + if(RapidJSON_INCLUDE_DIRS) + target_include_directories(RapidJSON INTERFACE "${RapidJSON_INCLUDE_DIRS}") + else() + target_include_directories(RapidJSON INTERFACE "${RAPIDJSON_INCLUDE_DIRS}") + endif() + endif() + return() +endif() + +if(RapidJSON_ROOT) + find_path(RAPIDJSON_INCLUDE_DIR + NAMES rapidjson/rapidjson.h + PATHS ${RapidJSON_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES "include") +else() + find_path(RAPIDJSON_INCLUDE_DIR + NAMES rapidjson/rapidjson.h + PATH_SUFFIXES "include") +endif() + +if(RAPIDJSON_INCLUDE_DIR) + file(READ "${RAPIDJSON_INCLUDE_DIR}/rapidjson/rapidjson.h" RAPIDJSON_H_CONTENT) + string(REGEX MATCH "#define RAPIDJSON_MAJOR_VERSION ([0-9]+)" + RAPIDJSON_MAJOR_VERSION_DEFINITION "${RAPIDJSON_H_CONTENT}") + string(REGEX REPLACE "^.+ ([0-9]+)$" "\\1" RAPIDJSON_MAJOR_VERSION + "${RAPIDJSON_MAJOR_VERSION_DEFINITION}") + string(REGEX MATCH "#define RAPIDJSON_MINOR_VERSION ([0-9]+)" + RAPIDJSON_MINOR_VERSION_DEFINITION "${RAPIDJSON_H_CONTENT}") + string(REGEX REPLACE 
"^.+ ([0-9]+)$" "\\1" RAPIDJSON_MINOR_VERSION + "${RAPIDJSON_MINOR_VERSION_DEFINITION}") + string(REGEX MATCH "#define RAPIDJSON_PATCH_VERSION ([0-9]+)" + RAPIDJSON_PATCH_VERSION_DEFINITION "${RAPIDJSON_H_CONTENT}") + string(REGEX REPLACE "^.+ ([0-9]+)$" "\\1" RAPIDJSON_PATCH_VERSION + "${RAPIDJSON_PATCH_VERSION_DEFINITION}") + if("${RAPIDJSON_MAJOR_VERSION}" STREQUAL "" + OR "${RAPIDJSON_MINOR_VERSION}" STREQUAL "" + OR "${RAPIDJSON_PATCH_VERSION}" STREQUAL "") + set(RAPIDJSON_VERSION "0.0.0") + else() + set(RAPIDJSON_VERSION + "${RAPIDJSON_MAJOR_VERSION}.${RAPIDJSON_MINOR_VERSION}.${RAPIDJSON_PATCH_VERSION}" + ) + endif() +endif() + +find_package_handle_standard_args( + RapidJSONAlt + REQUIRED_VARS RAPIDJSON_INCLUDE_DIR + VERSION_VAR RAPIDJSON_VERSION) + +if(RapidJSONAlt_FOUND) + if(WIN32 AND "${RAPIDJSON_INCLUDE_DIR}" MATCHES "^/") + # MSYS2 + execute_process(COMMAND "cygpath" "--windows" "${RAPIDJSON_INCLUDE_DIR}" + OUTPUT_VARIABLE RAPIDJSON_INCLUDE_DIR + OUTPUT_STRIP_TRAILING_WHITESPACE) + endif() + add_library(RapidJSON INTERFACE IMPORTED) + target_include_directories(RapidJSON INTERFACE "${RAPIDJSON_INCLUDE_DIR}") +endif() diff --git a/python/cmake_modules/FindSQLite3Alt.cmake b/python/cmake_modules/FindSQLite3Alt.cmake new file mode 100644 index 000000000000..b60939841ef3 --- /dev/null +++ b/python/cmake_modules/FindSQLite3Alt.cmake @@ -0,0 +1,47 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Once done this will define +# - FindSQLite3Alt +# +# This module will set the following variables if found: +# SQLite3_INCLUDE_DIRS - SQLite3 include dir. +# SQLite3_LIBRARIES - List of libraries when using SQLite3. +# SQLite3_FOUND - True if SQLite3 found. +# +# Usage of this module as follows: +# find_package(SQLite3Alt) + +if(FindSQLite3Alt_FOUND) + return() +endif() + +find_path(SQLite3_INCLUDE_DIR sqlite3.h) +find_library(SQLite3_LIBRARY NAMES sqlite3) + +# handle the QUIETLY and REQUIRED arguments and set SQLite3_FOUND to TRUE if +# all listed variables are TRUE +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(SQLite3Alt REQUIRED_VARS SQLite3_LIBRARY + SQLite3_INCLUDE_DIR) + +mark_as_advanced(SQLite3_LIBRARY SQLite3_INCLUDE_DIR) + +if(SQLite3Alt_FOUND) + set(SQLite3_INCLUDE_DIRS ${SQLite3_INCLUDE_DIR}) + set(SQLite3_LIBRARIES ${SQLite3_LIBRARY}) +endif() diff --git a/python/cmake_modules/FindSnappyAlt.cmake b/python/cmake_modules/FindSnappyAlt.cmake new file mode 100644 index 000000000000..d0a06f0997ad --- /dev/null +++ b/python/cmake_modules/FindSnappyAlt.cmake @@ -0,0 +1,119 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +if(SnappyAlt_FOUND) + return() +endif() + +if(ARROW_SNAPPY_USE_SHARED) + if(TARGET Snappy::snappy) + set(Snappy_TARGET Snappy::snappy) + set(SnappyAlt_FOUND TRUE) + return() + elseif(TARGET Snappy::snappy-static) + set(Snappy_TARGET Snappy::snappy-static) + set(SnappyAlt_FOUND TRUE) + return() + endif() +endif() + +set(find_package_args) +if(SnappyAlt_FIND_VERSION) + list(APPEND find_package_args ${SnappyAlt_FIND_VERSION}) +endif() +if(SnappyAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(Snappy ${find_package_args}) +if(Snappy_FOUND) + if(ARROW_SNAPPY_USE_SHARED) + set(Snappy_TARGET Snappy::snappy) + set(SnappyAlt_FOUND TRUE) + return() + else() + if(TARGET Snappy::snappy-static) + # The official SnappyTargets.cmake uses Snappy::snappy-static for + # static version. + set(Snappy_TARGET Snappy::snappy-static) + set(SnappyAlt_FOUND TRUE) + return() + else() + # The Conan's Snappy package always uses Snappy::snappy and it's + # an INTERFACE_LIBRARY. 
+ get_target_property(Snappy_TYPE Snappy::snappy TYPE) + if(Snappy_TYPE STREQUAL "STATIC_LIBRARY" OR Snappy_TYPE STREQUAL + "INTERFACE_LIBRARY") + set(Snappy_TARGET Snappy::snappy) + set(SnappyAlt_FOUND TRUE) + return() + endif() + endif() + endif() +endif() + +if(ARROW_SNAPPY_USE_SHARED) + set(SNAPPY_LIB_NAMES) + if(CMAKE_IMPORT_LIBRARY_SUFFIX) + list(APPEND SNAPPY_LIB_NAMES + "${CMAKE_IMPORT_LIBRARY_PREFIX}snappy${CMAKE_IMPORT_LIBRARY_SUFFIX}") + endif() + list(APPEND SNAPPY_LIB_NAMES + "${CMAKE_SHARED_LIBRARY_PREFIX}snappy${CMAKE_SHARED_LIBRARY_SUFFIX}") +else() + set(SNAPPY_STATIC_LIB_NAME_BASE "snappy") + if(MSVC) + set(SNAPPY_STATIC_LIB_NAME_BASE + "${SNAPPY_STATIC_LIB_NAME_BASE}${SNAPPY_MSVC_STATIC_LIB_SUFFIX}") + endif() + set(SNAPPY_LIB_NAMES + "${CMAKE_STATIC_LIBRARY_PREFIX}${SNAPPY_STATIC_LIB_NAME_BASE}${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) +endif() + +if(Snappy_ROOT) + find_library(Snappy_LIB + NAMES ${SNAPPY_LIB_NAMES} + PATHS ${Snappy_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_path(Snappy_INCLUDE_DIR + NAMES snappy.h + PATHS ${Snappy_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) +else() + find_library(Snappy_LIB NAMES ${SNAPPY_LIB_NAMES}) + find_path(Snappy_INCLUDE_DIR + NAMES snappy.h + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) +endif() + +find_package_handle_standard_args(SnappyAlt REQUIRED_VARS Snappy_LIB Snappy_INCLUDE_DIR) + +if(SnappyAlt_FOUND) + if(ARROW_SNAPPY_USE_SHARED) + set(Snappy_TARGET Snappy::snappy) + set(Snappy_TARGET_TYPE SHARED) + else() + set(Snappy_TARGET Snappy::snappy-static) + set(Snappy_TARGET_TYPE STATIC) + endif() + add_library(${Snappy_TARGET} ${Snappy_TARGET_TYPE} IMPORTED) + set_target_properties(${Snappy_TARGET} + PROPERTIES IMPORTED_LOCATION "${Snappy_LIB}" + INTERFACE_INCLUDE_DIRECTORIES "${Snappy_INCLUDE_DIR}") +endif() diff --git a/python/cmake_modules/FindThriftAlt.cmake b/python/cmake_modules/FindThriftAlt.cmake new file mode 100644 index
000000000000..0c5aed8e4e06 --- /dev/null +++ b/python/cmake_modules/FindThriftAlt.cmake @@ -0,0 +1,189 @@ +# Copyright 2012 Cloudera Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# - Find Thrift (a cross platform RPC lib/tool) +# +# Variables used by this module, they can change the default behaviour and need +# to be set before calling find_package: +# +# Thrift_ROOT - When set, this path is inspected instead of standard library +# locations as the root of the Thrift installation. +# The environment variable THRIFT_HOME overrides this variable. 
+# +# This module defines +# Thrift_FOUND, whether Thrift is found or not +# Thrift_COMPILER_FOUND, whether Thrift compiler is found or not +# +# thrift::thrift, a library target to use Thrift +# thrift::compiler, a executable target to use Thrift compiler + +if(ThriftAlt_FOUND) + return() +endif() + +set(find_package_args "") +if(ThriftAlt_FIND_VERSION) + list(APPEND find_package_args ${ThriftAlt_FIND_VERSION}) +endif() +if(ThriftAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(Thrift ${find_package_args}) +if(Thrift_FOUND) + set(ThriftAlt_FOUND TRUE) + add_executable(thrift::compiler IMPORTED) + set_target_properties(thrift::compiler PROPERTIES IMPORTED_LOCATION + "${THRIFT_COMPILER}") + return() +endif() + +function(extract_thrift_version) + if(ThriftAlt_INCLUDE_DIR) + file(READ "${ThriftAlt_INCLUDE_DIR}/thrift/config.h" THRIFT_CONFIG_H_CONTENT) + string(REGEX MATCH "#define PACKAGE_VERSION \"[0-9.]+\"" THRIFT_VERSION_DEFINITION + "${THRIFT_CONFIG_H_CONTENT}") + string(REGEX MATCH "[0-9.]+" ThriftAlt_VERSION "${THRIFT_VERSION_DEFINITION}") + set(ThriftAlt_VERSION + "${ThriftAlt_VERSION}" + PARENT_SCOPE) + else() + set(ThriftAlt_VERSION + "" + PARENT_SCOPE) + endif() +endfunction() + +if(MSVC_TOOLCHAIN AND NOT DEFINED THRIFT_MSVC_LIB_SUFFIX) + if(NOT ARROW_THRIFT_USE_SHARED) + if(ARROW_USE_STATIC_CRT) + if("${CMAKE_BUILD_TYPE}" STREQUAL "DEBUG") + set(THRIFT_MSVC_LIB_SUFFIX "mtd") + else() + set(THRIFT_MSVC_LIB_SUFFIX "mt") + endif() + else() + if("${CMAKE_BUILD_TYPE}" STREQUAL "DEBUG") + set(THRIFT_MSVC_LIB_SUFFIX "mdd") + else() + set(THRIFT_MSVC_LIB_SUFFIX "md") + endif() + endif() + endif() +endif() +set(ThriftAlt_LIB_NAME_BASE "thrift${THRIFT_MSVC_LIB_SUFFIX}") + +if(ARROW_THRIFT_USE_SHARED) + if(CMAKE_IMPORT_LIBRARY_SUFFIX) + set(ThriftAlt_LIB_NAME + "${CMAKE_IMPORT_LIBRARY_PREFIX}${ThriftAlt_LIB_NAME_BASE}${CMAKE_IMPORT_LIBRARY_SUFFIX}" + ) + else() + set(ThriftAlt_LIB_NAME + 
"${CMAKE_SHARED_LIBRARY_PREFIX}${ThriftAlt_LIB_NAME_BASE}${CMAKE_SHARED_LIBRARY_SUFFIX}" + ) + endif() +else() + set(ThriftAlt_LIB_NAME + "${CMAKE_STATIC_LIBRARY_PREFIX}${ThriftAlt_LIB_NAME_BASE}${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) +endif() + +if(Thrift_ROOT) + find_library(ThriftAlt_LIB + NAMES ${ThriftAlt_LIB_NAME} + PATHS ${Thrift_ROOT} + PATH_SUFFIXES "lib/${CMAKE_LIBRARY_ARCHITECTURE}" "lib") + find_path(ThriftAlt_INCLUDE_DIR thrift/Thrift.h + PATHS ${Thrift_ROOT} + PATH_SUFFIXES "include") + find_program(THRIFT_COMPILER thrift + PATHS ${Thrift_ROOT} + PATH_SUFFIXES "bin") + extract_thrift_version() +else() + # THRIFT-4760: The pkgconfig files are currently only installed when using autotools. + # Starting with 0.13, they are also installed for the CMake-based installations of Thrift. + find_package(PkgConfig QUIET) + pkg_check_modules(THRIFT_PC thrift) + if(THRIFT_PC_FOUND) + set(ThriftAlt_INCLUDE_DIR "${THRIFT_PC_INCLUDEDIR}") + + list(APPEND THRIFT_PC_LIBRARY_DIRS "${THRIFT_PC_LIBDIR}") + + find_library(ThriftAlt_LIB + NAMES ${ThriftAlt_LIB_NAME} + PATHS ${THRIFT_PC_LIBRARY_DIRS} + NO_DEFAULT_PATH) + find_program(THRIFT_COMPILER thrift + HINTS ${THRIFT_PC_PREFIX} + NO_DEFAULT_PATH + PATH_SUFFIXES "bin") + set(ThriftAlt_VERSION ${THRIFT_PC_VERSION}) + else() + find_library(ThriftAlt_LIB + NAMES ${ThriftAlt_LIB_NAME} + PATH_SUFFIXES "lib/${CMAKE_LIBRARY_ARCHITECTURE}" "lib") + find_path(ThriftAlt_INCLUDE_DIR thrift/Thrift.h PATH_SUFFIXES "include") + find_program(THRIFT_COMPILER thrift PATH_SUFFIXES "bin") + extract_thrift_version() + endif() +endif() + +if(THRIFT_COMPILER) + set(Thrift_COMPILER_FOUND TRUE) +else() + set(Thrift_COMPILER_FOUND FALSE) +endif() + +find_package_handle_standard_args( + ThriftAlt + REQUIRED_VARS ThriftAlt_LIB ThriftAlt_INCLUDE_DIR + VERSION_VAR ThriftAlt_VERSION + HANDLE_COMPONENTS) + +if(ThriftAlt_FOUND) + set(Thrift_VERSION ${ThriftAlt_VERSION}) + set(ThriftAlt_IMPORTED_PROPERTY_NAME IMPORTED_LOCATION) + # Reuse partially defined 
thrift::thrift by ThriftConfig.cmake. + if(NOT TARGET thrift::thrift) + if(ARROW_THRIFT_USE_SHARED) + add_library(thrift::thrift SHARED IMPORTED) + if(CMAKE_IMPORT_LIBRARY_SUFFIX) + set(ThriftAlt_IMPORTED_PROPERTY_NAME IMPORTED_IMPLIB) + endif() + else() + add_library(thrift::thrift STATIC IMPORTED) + endif() + endif() + set_target_properties(thrift::thrift + PROPERTIES ${ThriftAlt_IMPORTED_PROPERTY_NAME} "${ThriftAlt_LIB}" + INTERFACE_INCLUDE_DIRECTORIES + "${ThriftAlt_INCLUDE_DIR}") + if(WIN32 AND NOT MSVC_TOOLCHAIN) + # We don't need this for Visual C++ because Thrift uses + # "#pragma comment(lib, "Ws2_32.lib")" in + # thrift/windows/config.h for Visual C++. + set_target_properties(thrift::thrift PROPERTIES INTERFACE_LINK_LIBRARIES "ws2_32") + endif() + # Workaround: thrift.pc doesn't have Boost dependency. + if(TARGET Boost::headers) + target_link_libraries(thrift::thrift INTERFACE Boost::headers) + endif() + + if(Thrift_COMPILER_FOUND) + add_executable(thrift::compiler IMPORTED) + set_target_properties(thrift::compiler PROPERTIES IMPORTED_LOCATION + "${THRIFT_COMPILER}") + endif() +endif() diff --git a/python/cmake_modules/FindabslAlt.cmake b/python/cmake_modules/FindabslAlt.cmake new file mode 100644 index 000000000000..a7ebe63f5eb4 --- /dev/null +++ b/python/cmake_modules/FindabslAlt.cmake @@ -0,0 +1,46 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +if(abslAlt_FOUND) + return() +endif() + +set(find_package_args) + +if(abslAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +if(abslAlt_FIND_REQUIRED) + list(APPEND find_package_args REQUIRED) +endif() + +find_package(absl ${find_package_args}) + +if(NOT DEFINED absl_VERSION) + # Abseil does not define a version when build 'live at head'. + # As this is their recommended path we need to define a large version to pass version checks. + # CMake removes the '_head' suffix for version comparison but it will show up in the logs + # and matches the abseil-cpp.pc version of 'head' + set(absl_VERSION 99999999_head) +endif() + +set(abslAlt_VERSION ${absl_VERSION}) + +find_package_handle_standard_args( + abslAlt + REQUIRED_VARS absl_FOUND + VERSION_VAR abslAlt_VERSION) diff --git a/python/cmake_modules/Findc-aresAlt.cmake b/python/cmake_modules/Findc-aresAlt.cmake new file mode 100644 index 000000000000..152c843e373c --- /dev/null +++ b/python/cmake_modules/Findc-aresAlt.cmake @@ -0,0 +1,77 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +if(c-aresAlt_FOUND) + return() +endif() + +set(find_package_args) +if(c-aresAlt_FIND_VERSION) + list(APPEND find_package_args ${c-aresAlt_FIND_VERSION}) +endif() +if(c-aresAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(c-ares ${find_package_args}) +if(c-ares_FOUND) + set(c-aresAlt_FOUND TRUE) + return() +endif() + +find_package(PkgConfig QUIET) +pkg_check_modules(c-ares_PC libcares) +if(c-ares_PC_FOUND) + set(c-ares_INCLUDE_DIR "${c-ares_PC_INCLUDEDIR}") + + list(APPEND c-ares_PC_LIBRARY_DIRS "${c-ares_PC_LIBDIR}") + find_library(c-ares_LIB cares + PATHS ${c-ares_PC_LIBRARY_DIRS} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) +elseif(c-ares_ROOT) + find_library(c-ares_LIB + NAMES cares + "${CMAKE_SHARED_LIBRARY_PREFIX}cares${CMAKE_SHARED_LIBRARY_SUFFIX}" + PATHS ${c-ares_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_path(c-ares_INCLUDE_DIR + NAMES ares.h + PATHS ${c-ares_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) +else() + find_library(c-ares_LIB + NAMES cares + "${CMAKE_SHARED_LIBRARY_PREFIX}cares${CMAKE_SHARED_LIBRARY_SUFFIX}" + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_path(c-ares_INCLUDE_DIR + NAMES ares.h + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) +endif() + +find_package_handle_standard_args(c-aresAlt REQUIRED_VARS c-ares_LIB c-ares_INCLUDE_DIR) + +if(c-aresAlt_FOUND) + if(NOT TARGET c-ares::cares) + add_library(c-ares::cares UNKNOWN IMPORTED) + set_target_properties(c-ares::cares + PROPERTIES IMPORTED_LOCATION 
"${c-ares_LIB}" + INTERFACE_INCLUDE_DIRECTORIES + "${c-ares_INCLUDE_DIR}") + endif() +endif() diff --git a/python/cmake_modules/FindgRPCAlt.cmake b/python/cmake_modules/FindgRPCAlt.cmake new file mode 100644 index 000000000000..2ff10dbc23dd --- /dev/null +++ b/python/cmake_modules/FindgRPCAlt.cmake @@ -0,0 +1,101 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +if(gRPCAlt_FOUND) + return() +endif() + +set(find_package_args) +if(gRPCAlt_FIND_VERSION) + list(APPEND find_package_args ${gRPCAlt_FIND_VERSION}) +endif() +if(gRPCAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(gRPC ${find_package_args}) +if(gRPC_FOUND) + set(gRPCAlt_FOUND TRUE) + return() +endif() + +find_package(PkgConfig QUIET) +pkg_check_modules(GRPCPP_PC grpc++) +if(GRPCPP_PC_FOUND) + set(gRPCAlt_VERSION "${GRPCPP_PC_VERSION}") + set(GRPCPP_INCLUDE_DIRECTORIES ${GRPCPP_PC_INCLUDEDIR}) + # gRPC's pkg-config file neglects to specify pthreads. 
+ find_package(Threads REQUIRED) + if(ARROW_GRPC_USE_SHARED) + set(GRPCPP_LINK_LIBRARIES ${GRPCPP_PC_LINK_LIBRARIES}) + set(GRPCPP_LINK_OPTIONS ${GRPCPP_PC_LDFLAGS_OTHER}) + set(GRPCPP_COMPILE_OPTIONS ${GRPCPP_PC_CFLAGS_OTHER}) + else() + set(GRPCPP_LINK_LIBRARIES) + foreach(GRPCPP_LIBRARY_NAME ${GRPCPP_PC_STATIC_LIBRARIES}) + find_library(GRPCPP_LIBRARY_${GRPCPP_LIBRARY_NAME} + NAMES "${CMAKE_STATIC_LIBRARY_PREFIX}${GRPCPP_LIBRARY_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}" + HINTS ${GRPCPP_PC_STATIC_LIBRARY_DIRS}) + list(APPEND GRPCPP_LINK_LIBRARIES "${GRPCPP_LIBRARY_${GRPCPP_LIBRARY_NAME}}") + endforeach() + set(GRPCPP_LINK_OPTIONS ${GRPCPP_PC_STATIC_LDFLAGS_OTHER}) + set(GRPCPP_COMPILE_OPTIONS ${GRPCPP_PC_STATIC_CFLAGS_OTHER}) + endif() + list(APPEND GRPCPP_LINK_LIBRARIES Threads::Threads) + list(GET GRPCPP_LINK_LIBRARIES 0 GRPCPP_IMPORTED_LOCATION) + list(REMOVE_AT GRPCPP_LINK_LIBRARIES 0) + find_program(GRPC_CPP_PLUGIN grpc_cpp_plugin + HINTS ${GRPCPP_PC_PREFIX} + NO_DEFAULT_PATH + PATH_SUFFIXES "bin") +endif() +set(gRPCAlt_FIND_PACKAGE_ARGS gRPCAlt REQUIRED_VARS GRPCPP_IMPORTED_LOCATION + GRPC_CPP_PLUGIN) +if(gRPCAlt_VERSION) + list(APPEND gRPCAlt_FIND_PACKAGE_ARGS VERSION_VAR gRPCAlt_VERSION) +endif() +find_package_handle_standard_args(${gRPCAlt_FIND_PACKAGE_ARGS}) + +if(gRPCAlt_FOUND) + # gRPC does not expose the reflection library via pkg-config, but it should be alongside the main library + get_filename_component(GRPCPP_IMPORTED_DIRECTORY ${GRPCPP_IMPORTED_LOCATION} DIRECTORY) + if(ARROW_GRPC_USE_SHARED) + set(GRPCPP_REFLECTION_LIB_NAME + "${CMAKE_SHARED_LIBRARY_PREFIX}grpc++_reflection${CMAKE_SHARED_LIBRARY_SUFFIX}") + else() + set(GRPCPP_REFLECTION_LIB_NAME + "${CMAKE_STATIC_LIBRARY_PREFIX}grpc++_reflection${CMAKE_STATIC_LIBRARY_SUFFIX}") + endif() + find_library(GRPCPP_REFLECTION_IMPORTED_LOCATION + NAMES grpc++_reflection ${GRPCPP_REFLECTION_LIB_NAME} + PATHS ${GRPCPP_IMPORTED_DIRECTORY} + NO_DEFAULT_PATH) + + add_library(gRPC::grpc++ UNKNOWN IMPORTED) + 
set_target_properties(gRPC::grpc++ + PROPERTIES IMPORTED_LOCATION "${GRPCPP_IMPORTED_LOCATION}" + INTERFACE_COMPILE_OPTIONS "${GRPCPP_COMPILE_OPTIONS}" + INTERFACE_INCLUDE_DIRECTORIES + "${GRPCPP_INCLUDE_DIRECTORIES}" + INTERFACE_LINK_LIBRARIES "${GRPCPP_LINK_LIBRARIES}" + INTERFACE_LINK_OPTIONS "${GRPCPP_LINK_OPTIONS}") + + add_library(gRPC::grpc++_reflection UNKNOWN IMPORTED) + set_target_properties(gRPC::grpc++_reflection + PROPERTIES IMPORTED_LOCATION + "${GRPCPP_REFLECTION_IMPORTED_LOCATION}" + INTERFACE_LINK_LIBRARIES gRPC::grpc++) + + add_executable(gRPC::grpc_cpp_plugin IMPORTED) + set_target_properties(gRPC::grpc_cpp_plugin PROPERTIES IMPORTED_LOCATION + ${GRPC_CPP_PLUGIN}) +endif() diff --git a/python/cmake_modules/FindgflagsAlt.cmake b/python/cmake_modules/FindgflagsAlt.cmake new file mode 100644 index 000000000000..40733ee9bc48 --- /dev/null +++ b/python/cmake_modules/FindgflagsAlt.cmake @@ -0,0 +1,63 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +if(gflagsAlt_FOUND) + return() +endif() + +set(find_package_args) +if(gflagsAlt_FIND_VERSION) + list(APPEND find_package_args ${gflagsAlt_FIND_VERSION}) +endif() +if(gflagsAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(gflags ${find_package_args}) +if(gflags_FOUND) + set(gflagsAlt_FOUND TRUE) + return() +endif() + +# TODO: Support version detection. + +if(gflags_ROOT) + find_library(gflags_LIB + NAMES gflags + PATHS ${gflags_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_path(GFLAGS_INCLUDE_DIR + NAMES gflags/gflags.h + PATHS ${gflags_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) +else() + find_library(gflags_LIB NAMES gflags) + find_path(GFLAGS_INCLUDE_DIR + NAMES gflags/gflags.h + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) +endif() + +find_package_handle_standard_args(gflagsAlt REQUIRED_VARS gflags_LIB GFLAGS_INCLUDE_DIR) + +if(gflagsAlt_FOUND) + add_library(gflags::gflags UNKNOWN IMPORTED) + set_target_properties(gflags::gflags + PROPERTIES IMPORTED_LOCATION "${gflags_LIB}" + INTERFACE_INCLUDE_DIRECTORIES "${GFLAGS_INCLUDE_DIR}") + set(GFLAGS_LIBRARIES gflags::gflags) +endif() diff --git a/python/cmake_modules/FindglogAlt.cmake b/python/cmake_modules/FindglogAlt.cmake new file mode 100644 index 000000000000..eb16636add95 --- /dev/null +++ b/python/cmake_modules/FindglogAlt.cmake @@ -0,0 +1,74 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Tries to find GLog headers and libraries. +# +# Usage of this module as follows: +# +# find_package(glogAlt) + +if(glogAlt_FOUND) + return() +endif() + +set(find_package_args CONFIG) +if(glogAlt_FIND_VERSION) + list(APPEND find_package_args ${glogAlt_FIND_VERSION}) +endif() +if(glogAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(glog ${find_package_args}) +if(glog_FOUND) + set(glogAlt_FOUND TRUE) + return() +endif() + +find_package(PkgConfig QUIET) +pkg_check_modules(GLOG_PC libglog) +if(GLOG_PC_FOUND) + set(GLOG_INCLUDE_DIR "${GLOG_PC_INCLUDEDIR}") + list(APPEND GLOG_PC_LIBRARY_DIRS "${GLOG_PC_LIBDIR}") + find_library(GLOG_LIB glog + PATHS ${GLOG_PC_LIBRARY_DIRS} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) +elseif(GLOG_ROOT) + find_library(GLOG_LIB + NAMES glog + PATHS ${GLOG_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_path(GLOG_INCLUDE_DIR + NAMES glog/logging.h + PATHS ${GLOG_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) +else() + find_library(GLOG_LIB + NAMES glog + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_path(GLOG_INCLUDE_DIR + NAMES glog/logging.h + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) +endif() + +find_package_handle_standard_args(glogAlt REQUIRED_VARS GLOG_INCLUDE_DIR GLOG_LIB) + +if(glogAlt_FOUND) + add_library(glog::glog UNKNOWN IMPORTED) + set_target_properties(glog::glog + PROPERTIES IMPORTED_LOCATION "${GLOG_LIB}" + INTERFACE_INCLUDE_DIRECTORIES "${GLOG_INCLUDE_DIR}" + INTERFACE_COMPILE_DEFINITIONS "GLOG_USE_GLOG_EXPORT") +endif() diff --git a/python/cmake_modules/FindjemallocAlt.cmake b/python/cmake_modules/FindjemallocAlt.cmake new file mode 100644 index 000000000000..49616425db49 --- /dev/null +++ b/python/cmake_modules/FindjemallocAlt.cmake @@ -0,0 +1,105 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Tries to find jemalloc headers and libraries. +# +# Usage of this module as follows: +# +# find_package(jemallocAlt) +# +# This module defines +# jemalloc::jemalloc, target to use jemalloc + +if(jemallocAlt_FOUND) + return() +endif() + +if(ARROW_PACKAGE_KIND STREQUAL "conan") + set(find_package_args "") + if(jemallocAlt_FIND_VERSION) + list(APPEND find_package_args ${jemallocAlt_FIND_VERSION}) + endif() + if(jemallocAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) + endif() + if(jemallocAlt_FIND_REQUIRED) + list(APPEND find_package_args REQUIRED) + endif() + find_package(jemallocAlt NAMES jemalloc ${find_package_args}) + set(jemalloc_FOUND ${jemallocAlt_FOUND}) + if(jemallocAlt_FOUND) + return() + endif() +endif() + +if(ARROW_JEMALLOC_USE_SHARED) + set(jemallocAlt_LIB_NAMES) + if(CMAKE_IMPORT_LIBRARY_SUFFIX) + list(APPEND jemallocAlt_LIB_NAMES + "${CMAKE_IMPORT_LIBRARY_PREFIX}jemalloc${CMAKE_IMPORT_LIBRARY_SUFFIX}") + endif() + list(APPEND jemallocAlt_LIB_NAMES + "${CMAKE_SHARED_LIBRARY_PREFIX}jemalloc${CMAKE_SHARED_LIBRARY_SUFFIX}") +else() + set(jemallocAlt_LIB_NAMES + "${CMAKE_STATIC_LIBRARY_PREFIX}jemalloc${CMAKE_STATIC_LIBRARY_SUFFIX}") +endif() + +if(jemalloc_ROOT) + find_library(jemallocAlt_LIB + NAMES ${jemallocAlt_LIB_NAMES} + PATHS ${jemalloc_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_path(jemallocAlt_INCLUDE_DIR + NAMES jemalloc/jemalloc.h + PATHS ${jemalloc_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) + +else() + find_package(PkgConfig QUIET) + 
pkg_check_modules(jemallocAlt_PC jemalloc) + if(jemallocAlt_PC_FOUND) + set(jemallocAlt_INCLUDE_DIR "${jemallocAlt_PC_INCLUDEDIR}") + list(APPEND jemallocAlt_PC_LIBRARY_DIRS "${jemallocAlt_PC_LIBDIR}") + find_library(jemallocAlt_LIB + NAMES ${jemallocAlt_LIB_NAMES} + PATHS ${jemallocAlt_PC_LIBRARY_DIRS} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + else() + find_library(jemallocAlt_LIB + NAMES ${jemallocAlt_LIB_NAMES} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_path(jemallocAlt_INCLUDE_DIR + NAMES jemalloc/jemalloc.h + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) + endif() +endif() + +find_package_handle_standard_args(jemallocAlt REQUIRED_VARS jemallocAlt_LIB + jemallocAlt_INCLUDE_DIR) +set(jemalloc_FOUND ${jemallocAlt_FOUND}) +if(jemallocAlt_FOUND) + if(NOT TARGET jemalloc::jemalloc) + if(ARROW_JEMALLOC_USE_SHARED) + add_library(jemalloc::jemalloc SHARED IMPORTED) + else() + add_library(jemalloc::jemalloc STATIC IMPORTED) + endif() + set_target_properties(jemalloc::jemalloc + PROPERTIES IMPORTED_LOCATION "${jemallocAlt_LIB}" + INTERFACE_INCLUDE_DIRECTORIES + "${jemallocAlt_INCLUDE_DIR}") + endif() +endif() diff --git a/python/cmake_modules/Findlibrados.cmake b/python/cmake_modules/Findlibrados.cmake new file mode 100644 index 000000000000..b993dbff114e --- /dev/null +++ b/python/cmake_modules/Findlibrados.cmake @@ -0,0 +1,38 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +if(librados_FOUND) + return() +endif() + +find_path(LIBRADOS_INCLUDE_DIR rados/librados.hpp) + +find_library(LIBRADOS_LIBRARY NAMES rados) + +mark_as_advanced(LIBRADOS_LIBRARY LIBRADOS_INCLUDE_DIR) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(librados DEFAULT_MSG LIBRADOS_LIBRARY + LIBRADOS_INCLUDE_DIR) + +if(librados_FOUND) + add_library(librados::rados UNKNOWN IMPORTED) + set_target_properties(librados::rados + PROPERTIES IMPORTED_LOCATION "${LIBRADOS_LIBRARY}" + INTERFACE_INCLUDE_DIRECTORIES + "${LIBRADOS_INCLUDE_DIR}") +endif() diff --git a/python/cmake_modules/Findlz4Alt.cmake b/python/cmake_modules/Findlz4Alt.cmake new file mode 100644 index 000000000000..91e735107a95 --- /dev/null +++ b/python/cmake_modules/Findlz4Alt.cmake @@ -0,0 +1,112 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +if(lz4Alt_FOUND) + return() +endif() + +set(find_package_args) +if(lz4Alt_FIND_VERSION) + list(APPEND find_package_args ${lz4Alt_FIND_VERSION}) +endif() +if(lz4Alt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(lz4 ${find_package_args}) +if(lz4_FOUND) + set(lz4Alt_FOUND TRUE) + if(NOT TARGET LZ4::lz4) + # Conan uses lz4::lz4 not LZ4::lz4 + if(TARGET lz4::lz4) + add_library(LZ4::lz4 ALIAS lz4::lz4) + elseif(ARROW_LZ4_USE_SHARED) + add_library(LZ4::lz4 ALIAS LZ4::lz4_shared) + else() + add_library(LZ4::lz4 ALIAS LZ4::lz4_static) + endif() + endif() + return() +endif() + +if(MSVC_TOOLCHAIN AND NOT DEFINED LZ4_MSVC_LIB_PREFIX) + set(LZ4_MSVC_LIB_PREFIX "lib") +endif() +set(LZ4_LIB_NAME_BASE "${LZ4_MSVC_LIB_PREFIX}lz4") + +if(ARROW_LZ4_USE_SHARED) + set(LZ4_LIB_NAMES) + if(CMAKE_IMPORT_LIBRARY_SUFFIX) + list(APPEND + LZ4_LIB_NAMES + "${CMAKE_IMPORT_LIBRARY_PREFIX}${LZ4_LIB_NAME_BASE}${CMAKE_IMPORT_LIBRARY_SUFFIX}" + ) + endif() + list(APPEND LZ4_LIB_NAMES + "${CMAKE_SHARED_LIBRARY_PREFIX}${LZ4_LIB_NAME_BASE}${CMAKE_SHARED_LIBRARY_SUFFIX}") +else() + if(MSVC AND NOT DEFINED LZ4_MSVC_STATIC_LIB_SUFFIX) + set(LZ4_MSVC_STATIC_LIB_SUFFIX "_static") + endif() + set(LZ4_STATIC_LIB_SUFFIX "${LZ4_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}") + set(LZ4_LIB_NAMES + "${CMAKE_STATIC_LIBRARY_PREFIX}${LZ4_LIB_NAME_BASE}${LZ4_STATIC_LIB_SUFFIX}") +endif() + +if(LZ4_ROOT) + find_library(LZ4_LIB + NAMES ${LZ4_LIB_NAMES} + PATHS ${LZ4_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_path(LZ4_INCLUDE_DIR + NAMES lz4.h + PATHS ${LZ4_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) + +else() + find_package(PkgConfig QUIET) + pkg_check_modules(LZ4_PC liblz4) + if(LZ4_PC_FOUND) + set(LZ4_INCLUDE_DIR "${LZ4_PC_INCLUDEDIR}") + + list(APPEND LZ4_PC_LIBRARY_DIRS "${LZ4_PC_LIBDIR}") + 
find_library(LZ4_LIB + NAMES ${LZ4_LIB_NAMES} + PATHS ${LZ4_PC_LIBRARY_DIRS} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + else() + find_library(LZ4_LIB + NAMES ${LZ4_LIB_NAMES} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_path(LZ4_INCLUDE_DIR + NAMES lz4.h + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) + endif() +endif() + +find_package_handle_standard_args(lz4Alt REQUIRED_VARS LZ4_LIB LZ4_INCLUDE_DIR) + +if(lz4Alt_FOUND) + if(NOT TARGET LZ4::lz4) + add_library(LZ4::lz4 UNKNOWN IMPORTED) + set_target_properties(LZ4::lz4 + PROPERTIES IMPORTED_LOCATION "${LZ4_LIB}" + INTERFACE_INCLUDE_DIRECTORIES "${LZ4_INCLUDE_DIR}") + endif() +endif() diff --git a/python/cmake_modules/FindorcAlt.cmake b/python/cmake_modules/FindorcAlt.cmake new file mode 100644 index 000000000000..ce8cd11b4c3f --- /dev/null +++ b/python/cmake_modules/FindorcAlt.cmake @@ -0,0 +1,75 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +if(orcAlt_FOUND) + return() +endif() + +set(find_package_args) +if(orcAlt_FIND_VERSION) + list(APPEND find_package_args ${orcAlt_FIND_VERSION}) +endif() +if(orcAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(orc ${find_package_args}) +if(orc_FOUND) + set(orcAlt_FOUND TRUE) + set(orcAlt_VERSION ${orc_VERSION}) + return() +endif() + +if(ORC_ROOT) + find_library(ORC_STATIC_LIB + NAMES orc + PATHS ${ORC_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_path(ORC_INCLUDE_DIR + NAMES orc/orc-config.hh + PATHS ${ORC_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) +else() + find_library(ORC_STATIC_LIB + NAMES orc + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_path(ORC_INCLUDE_DIR + NAMES orc/orc-config.hh + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) +endif() +if(ORC_INCLUDE_DIR) + file(READ "${ORC_INCLUDE_DIR}/orc/orc-config.hh" ORC_CONFIG_HH_CONTENT) + string(REGEX MATCH "#define ORC_VERSION \"[0-9.]+\"" ORC_VERSION_DEFINITION + "${ORC_CONFIG_HH_CONTENT}") + string(REGEX MATCH "[0-9.]+" ORC_VERSION "${ORC_VERSION_DEFINITION}") +endif() + +find_package_handle_standard_args( + orcAlt + REQUIRED_VARS ORC_STATIC_LIB ORC_INCLUDE_DIR + VERSION_VAR ORC_VERSION) + +if(orcAlt_FOUND) + if(NOT TARGET orc::orc) + add_library(orc::orc STATIC IMPORTED) + set_target_properties(orc::orc + PROPERTIES IMPORTED_LOCATION "${ORC_STATIC_LIB}" + INTERFACE_INCLUDE_DIRECTORIES "${ORC_INCLUDE_DIR}") + endif() + set(orcAlt_VERSION ${ORC_VERSION}) +endif() diff --git a/python/cmake_modules/Findre2Alt.cmake b/python/cmake_modules/Findre2Alt.cmake new file mode 100644 index 000000000000..a83c02216c26 --- /dev/null +++ b/python/cmake_modules/Findre2Alt.cmake @@ -0,0 +1,93 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +if(re2Alt_FOUND) + return() +endif() + +set(find_package_args) +if(re2Alt_FIND_VERSION) + list(APPEND find_package_args ${re2Alt_FIND_VERSION}) +endif() +if(re2Alt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(re2 ${find_package_args}) +if(re2_FOUND) + set(re2Alt_FOUND TRUE) + return() +endif() + +if(re2_ROOT) + find_library(RE2_LIB + NAMES re2_static + re2 + "${CMAKE_STATIC_LIBRARY_PREFIX}re2${RE2_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}" + "${CMAKE_SHARED_LIBRARY_PREFIX}re2${CMAKE_SHARED_LIBRARY_SUFFIX}" + PATHS ${re2_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_path(RE2_INCLUDE_DIR + NAMES re2/re2.h + PATHS ${re2_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) +else() + find_package(PkgConfig QUIET) + pkg_check_modules(RE2_PC re2) + if(RE2_PC_FOUND) + set(RE2_INCLUDE_DIR "${RE2_PC_INCLUDEDIR}") + + list(APPEND RE2_PC_LIBRARY_DIRS "${RE2_PC_LIBDIR}") + find_library(RE2_LIB re2 + PATHS ${RE2_PC_LIBRARY_DIRS} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + + # On Fedora, the reported prefix is wrong. As users likely run into this, + # workaround. 
+ # https://bugzilla.redhat.com/show_bug.cgi?id=1652589 + if(UNIX + AND NOT APPLE + AND NOT RE2_LIB) + if(RE2_PC_PREFIX STREQUAL "/usr/local") + find_library(RE2_LIB re2) + endif() + endif() + else() + find_library(RE2_LIB + NAMES re2_static + re2 + "${CMAKE_STATIC_LIBRARY_PREFIX}re2${RE2_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}" + "${CMAKE_SHARED_LIBRARY_PREFIX}re2${CMAKE_SHARED_LIBRARY_SUFFIX}" + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_path(RE2_INCLUDE_DIR + NAMES re2/re2.h + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) + endif() +endif() + +find_package_handle_standard_args(re2Alt REQUIRED_VARS RE2_LIB RE2_INCLUDE_DIR) + +if(re2Alt_FOUND) + if(NOT TARGET re2::re2) + add_library(re2::re2 UNKNOWN IMPORTED) + set_target_properties(re2::re2 + PROPERTIES IMPORTED_LOCATION "${RE2_LIB}" + INTERFACE_INCLUDE_DIRECTORIES "${RE2_INCLUDE_DIR}") + endif() +endif() diff --git a/python/cmake_modules/Findutf8proc.cmake b/python/cmake_modules/Findutf8proc.cmake new file mode 100644 index 000000000000..75485427222b --- /dev/null +++ b/python/cmake_modules/Findutf8proc.cmake @@ -0,0 +1,122 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +if(utf8proc_FOUND) + return() +endif() + +if(ARROW_VCPKG) + set(find_package_args "") + if(utf8proc_FIND_VERSION) + list(APPEND find_package_args ${utf8proc_FIND_VERSION}) + endif() + if(utf8proc_FIND_QUIETLY) + list(APPEND find_package_args QUIET) + endif() + if(utf8proc_FIND_REQUIRED) + list(APPEND find_package_args REQUIRED) + endif() + find_package(utf8proc NAMES unofficial-utf8proc ${find_package_args}) + if(utf8proc_FOUND) + return() + endif() +endif() + +function(extract_utf8proc_version) + if(utf8proc_INCLUDE_DIR) + file(READ "${utf8proc_INCLUDE_DIR}/utf8proc.h" UTF8PROC_H_CONTENT) + + string(REGEX MATCH "#define UTF8PROC_VERSION_MAJOR [0-9]+" + UTF8PROC_MAJOR_VERSION_DEFINITION "${UTF8PROC_H_CONTENT}") + string(REGEX MATCH "#define UTF8PROC_VERSION_MINOR [0-9]+" + UTF8PROC_MINOR_VERSION_DEFINITION "${UTF8PROC_H_CONTENT}") + string(REGEX MATCH "#define UTF8PROC_VERSION_PATCH [0-9]+" + UTF8PROC_PATCH_VERSION_DEFINITION "${UTF8PROC_H_CONTENT}") + + string(REGEX MATCH "[0-9]+$" UTF8PROC_MAJOR_VERSION + "${UTF8PROC_MAJOR_VERSION_DEFINITION}") + string(REGEX MATCH "[0-9]+$" UTF8PROC_MINOR_VERSION + "${UTF8PROC_MINOR_VERSION_DEFINITION}") + string(REGEX MATCH "[0-9]+$" UTF8PROC_PATCH_VERSION + "${UTF8PROC_PATCH_VERSION_DEFINITION}") + set(utf8proc_VERSION + "${UTF8PROC_MAJOR_VERSION}.${UTF8PROC_MINOR_VERSION}.${UTF8PROC_PATCH_VERSION}" + PARENT_SCOPE) + else() + set(utf8proc_VERSION + "" + PARENT_SCOPE) + endif() +endfunction(extract_utf8proc_version) + +if(ARROW_UTF8PROC_USE_SHARED) + set(utf8proc_LIB_NAMES) + if(CMAKE_IMPORT_LIBRARY_SUFFIX) + list(APPEND utf8proc_LIB_NAMES + "${CMAKE_IMPORT_LIBRARY_PREFIX}utf8proc${CMAKE_IMPORT_LIBRARY_SUFFIX}") + endif() + list(APPEND utf8proc_LIB_NAMES + "${CMAKE_SHARED_LIBRARY_PREFIX}utf8proc${CMAKE_SHARED_LIBRARY_SUFFIX}") +else() + if(MSVC AND NOT DEFINED utf8proc_MSVC_STATIC_LIB_SUFFIX) + set(utf8proc_MSVC_STATIC_LIB_SUFFIX "_static") + endif() + set(utf8proc_STATIC_LIB_SUFFIX + 
"${utf8proc_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}") + set(utf8proc_LIB_NAMES + "${CMAKE_STATIC_LIBRARY_PREFIX}utf8proc${utf8proc_STATIC_LIB_SUFFIX}") +endif() + +if(utf8proc_ROOT) + find_library(utf8proc_LIB + NAMES ${utf8proc_LIB_NAMES} + PATHS ${utf8proc_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_path(utf8proc_INCLUDE_DIR + NAMES utf8proc.h + PATHS ${utf8proc_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) + extract_utf8proc_version() +else() + find_library(utf8proc_LIB + NAMES ${utf8proc_LIB_NAMES} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_path(utf8proc_INCLUDE_DIR + NAMES utf8proc.h + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) + extract_utf8proc_version() +endif() + +find_package_handle_standard_args( + utf8proc + REQUIRED_VARS utf8proc_LIB utf8proc_INCLUDE_DIR + VERSION_VAR utf8proc_VERSION) + +if(utf8proc_FOUND) + set(utf8proc_FOUND TRUE) + add_library(utf8proc::utf8proc UNKNOWN IMPORTED) + set_target_properties(utf8proc::utf8proc + PROPERTIES IMPORTED_LOCATION "${utf8proc_LIB}" + INTERFACE_INCLUDE_DIRECTORIES + "${utf8proc_INCLUDE_DIR}") + if(NOT ARROW_UTF8PROC_USE_SHARED) + set_target_properties(utf8proc::utf8proc PROPERTIES INTERFACE_COMPILE_DEFINITIONS + "UTF8PROC_STATIC") + endif() +endif() diff --git a/python/cmake_modules/FindzstdAlt.cmake b/python/cmake_modules/FindzstdAlt.cmake new file mode 100644 index 000000000000..980cf265521d --- /dev/null +++ b/python/cmake_modules/FindzstdAlt.cmake @@ -0,0 +1,142 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +if(zstdAlt_FOUND) + return() +endif() + +set(find_package_args) +if(zstdAlt_FIND_VERSION) + list(APPEND find_package_args ${zstdAlt_FIND_VERSION}) +endif() +if(zstdAlt_FIND_QUIETLY) + list(APPEND find_package_args QUIET) +endif() +find_package(zstd ${find_package_args}) +if(zstd_FOUND) + set(zstdAlt_FOUND TRUE) + return() +endif() + +if(MSVC AND NOT DEFINED ZSTD_MSVC_LIB_PREFIX) + set(ZSTD_MSVC_LIB_PREFIX "lib") +endif() +set(ZSTD_LIB_NAME_BASE "${ZSTD_MSVC_LIB_PREFIX}zstd") + +if(ARROW_ZSTD_USE_SHARED) + set(ZSTD_LIB_NAMES) + if(CMAKE_IMPORT_LIBRARY_SUFFIX) + list(APPEND + ZSTD_LIB_NAMES + "${CMAKE_IMPORT_LIBRARY_PREFIX}${ZSTD_LIB_NAME_BASE}${CMAKE_IMPORT_LIBRARY_SUFFIX}" + ) + endif() + list(APPEND ZSTD_LIB_NAMES + "${CMAKE_SHARED_LIBRARY_PREFIX}${ZSTD_LIB_NAME_BASE}${CMAKE_SHARED_LIBRARY_SUFFIX}" + ) +else() + if(MSVC AND NOT DEFINED ZSTD_MSVC_STATIC_LIB_SUFFIX) + set(ZSTD_MSVC_STATIC_LIB_SUFFIX "_static") + endif() + set(ZSTD_STATIC_LIB_SUFFIX + "${ZSTD_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}") + set(ZSTD_LIB_NAMES + "${CMAKE_STATIC_LIBRARY_PREFIX}${ZSTD_LIB_NAME_BASE}${ZSTD_STATIC_LIB_SUFFIX}") +endif() + +# First, find via if specified ZSTD_ROOT +if(ZSTD_ROOT) + message(STATUS "Using ZSTD_ROOT: ${ZSTD_ROOT}") + find_library(ZSTD_LIB + NAMES ${ZSTD_LIB_NAMES} + PATHS ${ZSTD_ROOT} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES} + NO_DEFAULT_PATH) + find_path(ZSTD_INCLUDE_DIR + NAMES zstd.h + PATHS ${ZSTD_ROOT} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) + +else() + # Second, find via pkg_check_modules + 
find_package(PkgConfig QUIET) + pkg_check_modules(ZSTD_PC libzstd) + if(ZSTD_PC_FOUND) + set(zstdAlt_VERSION "${ZSTD_PC_VERSION}") + set(ZSTD_INCLUDE_DIR "${ZSTD_PC_INCLUDEDIR}") + + list(APPEND ZSTD_PC_LIBRARY_DIRS "${ZSTD_PC_LIBDIR}") + find_library(ZSTD_LIB + NAMES ${ZSTD_LIB_NAMES} + PATHS ${ZSTD_PC_LIBRARY_DIRS} + NO_DEFAULT_PATH + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + else() + # Third, check all other CMake paths + find_library(ZSTD_LIB + NAMES ${ZSTD_LIB_NAMES} + PATH_SUFFIXES ${ARROW_LIBRARY_PATH_SUFFIXES}) + find_path(ZSTD_INCLUDE_DIR + NAMES zstd.h + PATH_SUFFIXES ${ARROW_INCLUDE_PATH_SUFFIXES}) + endif() +endif() + +if("${zstdAlt_VERSION}" STREQUAL "" AND ZSTD_INCLUDE_DIR) + file(READ "${ZSTD_INCLUDE_DIR}/zstd.h" ZSTD_H_CONTENT) + string(REGEX MATCH "#define ZSTD_VERSION_MAJOR +([0-9]+)" ZSTD_VERSION_MAJOR_DEFINITION + "${ZSTD_H_CONTENT}") + string(REGEX REPLACE "^.+ ([0-9]+)$" "\\1" ZSTD_VERSION_MAJOR + "${ZSTD_VERSION_MAJOR_DEFINITION}") + string(REGEX MATCH "#define ZSTD_VERSION_MINOR +([0-9]+)" ZSTD_VERSION_MINOR_DEFINITION + "${ZSTD_H_CONTENT}") + string(REGEX REPLACE "^.+ ([0-9]+)$" "\\1" ZSTD_VERSION_MINOR + "${ZSTD_VERSION_MINOR_DEFINITION}") + string(REGEX MATCH "#define ZSTD_VERSION_RELEASE +([0-9]+)" + ZSTD_VERSION_RELEASE_DEFINITION "${ZSTD_H_CONTENT}") + string(REGEX REPLACE "^.+ ([0-9]+)$" "\\1" ZSTD_VERSION_RELEASE + "${ZSTD_VERSION_RELEASE_DEFINITION}") + if("${ZSTD_VERSION_MAJOR}" STREQUAL "" + OR "${ZSTD_VERSION_MINOR}" STREQUAL "" + OR "${ZSTD_VERSION_RELEASE}" STREQUAL "") + set(zstdAlt_VERSION "0.0.0") + else() + set(zstdAlt_VERSION + "${ZSTD_VERSION_MAJOR}.${ZSTD_VERSION_MINOR}.${ZSTD_VERSION_RELEASE}") + endif() +endif() + +find_package_handle_standard_args( + zstdAlt + REQUIRED_VARS ZSTD_LIB ZSTD_INCLUDE_DIR + VERSION_VAR zstdAlt_VERSION) + +if(zstdAlt_FOUND) + if(ARROW_ZSTD_USE_SHARED) + set(zstd_TARGET zstd::libzstd_shared) + add_library(${zstd_TARGET} SHARED IMPORTED) + else() + set(zstd_TARGET zstd::libzstd_static) + 
add_library(${zstd_TARGET} STATIC IMPORTED) + endif() + set_target_properties(${zstd_TARGET} + PROPERTIES IMPORTED_LOCATION "${ZSTD_LIB}" + INTERFACE_INCLUDE_DIRECTORIES "${ZSTD_INCLUDE_DIR}") + message(STATUS "Zstandard library: ${ZSTD_LIB}") + message(STATUS "Zstandard include directory: ${ZSTD_INCLUDE_DIR}") +endif() diff --git a/python/cmake_modules/GandivaAddBitcode.cmake b/python/cmake_modules/GandivaAddBitcode.cmake new file mode 100644 index 000000000000..b22581b4a115 --- /dev/null +++ b/python/cmake_modules/GandivaAddBitcode.cmake @@ -0,0 +1,75 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Create bitcode for the given source file. +function(gandiva_add_bitcode SOURCE) + set(CLANG_OPTIONS -std=c++20) + if(MSVC) + # "19.20" means that it's compatible with Visual Studio 16 2019. + # We can update this to "19.30" when we dropped support for Visual + # Studio 16 2019. + # + # See https://cmake.org/cmake/help/latest/variable/MSVC_VERSION.html + # for MSVC_VERSION and Visual Studio version. 
+ set(FMS_COMPATIBILITY 19.20) + list(APPEND CLANG_OPTIONS -fms-compatibility + -fms-compatibility-version=${FMS_COMPATIBILITY}) + endif() + + get_filename_component(SOURCE_BASE ${SOURCE} NAME_WE) + get_filename_component(ABSOLUTE_SOURCE ${SOURCE} ABSOLUTE) + set(BC_FILE ${CMAKE_CURRENT_BINARY_DIR}/${SOURCE_BASE}.bc) + set(PRECOMPILE_COMMAND) + if(CMAKE_OSX_SYSROOT) + list(APPEND + PRECOMPILE_COMMAND + ${CMAKE_COMMAND} + -E + env + SDKROOT=${CMAKE_OSX_SYSROOT}) + endif() + list(APPEND + PRECOMPILE_COMMAND + ${CLANG_EXECUTABLE} + ${CLANG_OPTIONS} + -DGANDIVA_IR + -DNDEBUG # DCHECK macros not implemented in precompiled code + -DARROW_STATIC # Do not set __declspec(dllimport) on MSVC on Arrow symbols + -DGANDIVA_STATIC # Do not set __declspec(dllimport) on MSVC on Gandiva symbols + -fno-use-cxa-atexit # Workaround for unresolved __dso_handle + -emit-llvm + -O3 + -c + ${ABSOLUTE_SOURCE} + -o + ${BC_FILE} + ${ARROW_GANDIVA_PC_CXX_FLAGS}) + if(ARROW_BINARY_DIR) + list(APPEND PRECOMPILE_COMMAND -I${ARROW_BINARY_DIR}/src) + endif() + if(ARROW_SOURCE_DIR) + list(APPEND PRECOMPILE_COMMAND -I${ARROW_SOURCE_DIR}/src) + endif() + if(NOT ARROW_USE_NATIVE_INT128) + foreach(boost_include_dir ${Boost_INCLUDE_DIRS}) + list(APPEND PRECOMPILE_COMMAND -I${boost_include_dir}) + endforeach() + endif() + add_custom_command(OUTPUT ${BC_FILE} + COMMAND ${PRECOMPILE_COMMAND} + DEPENDS ${SOURCE}) +endfunction() diff --git a/python/cmake_modules/SetupCxxFlags.cmake b/python/cmake_modules/SetupCxxFlags.cmake new file mode 100644 index 000000000000..c35fc6a6fe73 --- /dev/null +++ b/python/cmake_modules/SetupCxxFlags.cmake @@ -0,0 +1,750 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Check if the target architecture and compiler supports some special +# instruction sets that would boost performance. +include(CheckCXXCompilerFlag) +include(CheckCXXSourceCompiles) +# Get cpu architecture + +message(STATUS "System processor: ${CMAKE_SYSTEM_PROCESSOR}") + +if(NOT DEFINED ARROW_CPU_FLAG) + if(CMAKE_SYSTEM_NAME STREQUAL "Emscripten") + set(ARROW_CPU_FLAG "emscripten") + elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "AMD64|amd64|X86|x86|i[3456]86|x64") + set(ARROW_CPU_FLAG "x86") + elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^arm$|armv[4-7]|armv8l") + set(ARROW_CPU_FLAG "aarch32") + elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|ARM64|arm64|armv") + set(ARROW_CPU_FLAG "aarch64") + elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "powerpc|ppc") + set(ARROW_CPU_FLAG "ppc") + elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "s390x") + set(ARROW_CPU_FLAG "s390x") + elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "riscv64") + set(ARROW_CPU_FLAG "riscv64") + elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "loongarch64") + set(ARROW_CPU_FLAG "loongarch64") + else() + message(FATAL_ERROR "Unknown system processor") + endif() +endif() + +# Check architecture specific compiler flags +if(ARROW_CPU_FLAG STREQUAL "x86") + # x86/amd64 compiler flags, msvc/gcc/clang + if(MSVC) + set(ARROW_SSE4_2_FLAG "/arch:SSE4.2") + # These definitions are needed for xsimd to consider the corresponding instruction + # sets available, but they are not set by MSVC 
(unlike other compilers). + # See https://github.com/AcademySoftwareFoundation/OpenImageIO/issues/4265 + add_definitions(-D__SSE2__ -D__SSE4_1__ -D__SSE4_2__) + set(ARROW_AVX2_FLAGS "/arch:AVX2") + # MSVC has no specific flag for BMI2, it seems to be enabled with AVX2 + set(ARROW_BMI2_FLAGS "/arch:AVX2") + set(ARROW_AVX512_FLAG "/arch:AVX512") + set(CXX_SUPPORTS_SSE4_2 TRUE) + else() + set(ARROW_SSE4_2_FLAG "-msse4.2") + set(ARROW_AVX2_FLAGS "-march=haswell") + set(ARROW_BMI2_FLAG "-mbmi2") + # skylake-avx512 consists of AVX512F,AVX512BW,AVX512VL,AVX512CD,AVX512DQ + set(ARROW_AVX512_FLAG "-march=skylake-avx512") + # Append the avx2/avx512 subset option also, fix issue ARROW-9877 for homebrew-cpp + list(APPEND ARROW_AVX2_FLAGS "-mavx2") + set(ARROW_AVX512_FLAG + "${ARROW_AVX512_FLAG} -mavx512f -mavx512cd -mavx512vl -mavx512dq -mavx512bw") + check_cxx_compiler_flag(${ARROW_SSE4_2_FLAG} CXX_SUPPORTS_SSE4_2) + endif() + if(CMAKE_SIZEOF_VOID_P EQUAL 8) + # Check for AVX extensions on 64-bit systems only, as 32-bit support seems iffy + list(JOIN ARROW_AVX2_FLAGS " " ARROW_AVX2_FLAGS_COMMAND_LINE) + if(MINGW AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=54412 + message(STATUS "Disable AVX2 support on gcc / MINGW for now") + else() + check_cxx_compiler_flag("${ARROW_AVX2_FLAGS_COMMAND_LINE}" CXX_SUPPORTS_AVX2) + endif() + if(MINGW) + # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=65782 + message(STATUS "Disable AVX512 support on MINGW for now") + else() + # Check for AVX512 support in the compiler. 
set(OLD_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS}) + set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${ARROW_AVX512_FLAG}") + check_cxx_source_compiles(" + #ifdef _MSC_VER + #include <intrin.h> + #else + #include <immintrin.h> + #endif + + int main() { + __m512i mask = _mm512_set1_epi32(0x1); + char out[32]; + _mm512_storeu_si512(out, mask); + return 0; + }" + CXX_SUPPORTS_AVX512) + set(CMAKE_REQUIRED_FLAGS ${OLD_CMAKE_REQUIRED_FLAGS}) + endif() + endif() + # Runtime SIMD level it can get from compiler and ARROW_RUNTIME_SIMD_LEVEL + if(CXX_SUPPORTS_SSE4_2 AND ARROW_RUNTIME_SIMD_LEVEL MATCHES + "^(SSE4_2|AVX2|AVX512|MAX)$") + set(ARROW_HAVE_RUNTIME_SSE4_2 ON) + add_definitions(-DARROW_HAVE_RUNTIME_SSE4_2) + endif() + # Note: for now we assume that AVX2 support should also enable BMI2 support, + # at least at compile-time (more care may be required for runtime dispatch). + if(CXX_SUPPORTS_AVX2 AND ARROW_RUNTIME_SIMD_LEVEL MATCHES "^(AVX2|AVX512|MAX)$") + set(ARROW_HAVE_RUNTIME_AVX2 ON) + set(ARROW_HAVE_RUNTIME_BMI2 ON) + add_definitions(-DARROW_HAVE_RUNTIME_AVX2 -DARROW_HAVE_RUNTIME_BMI2) + endif() + if(CXX_SUPPORTS_AVX512 AND ARROW_RUNTIME_SIMD_LEVEL MATCHES "^(AVX512|MAX)$") + set(ARROW_HAVE_RUNTIME_AVX512 ON) + add_definitions(-DARROW_HAVE_RUNTIME_AVX512) + endif() + if(ARROW_SIMD_LEVEL STREQUAL "DEFAULT") + set(ARROW_SIMD_LEVEL "SSE4_2") + endif() +elseif(ARROW_CPU_FLAG STREQUAL "ppc") + # power compiler flags, gcc/clang only + set(ARROW_ALTIVEC_FLAG "-maltivec") + check_cxx_compiler_flag(${ARROW_ALTIVEC_FLAG} CXX_SUPPORTS_ALTIVEC) + if(ARROW_SIMD_LEVEL STREQUAL "DEFAULT") + set(ARROW_SIMD_LEVEL "NONE") + endif() +elseif(ARROW_CPU_FLAG STREQUAL "aarch64") + # Arm64 compiler flags, gcc/clang only + set(ARROW_ARMV8_MARCH "armv8-a") + check_cxx_compiler_flag("-march=${ARROW_ARMV8_MARCH}+sve" CXX_SUPPORTS_SVE) + if(ARROW_SIMD_LEVEL STREQUAL "DEFAULT") + set(ARROW_SIMD_LEVEL "NEON") + endif() +endif() + +# Support C11 +if(NOT DEFINED CMAKE_C_STANDARD) + set(CMAKE_C_STANDARD 11)
+endif() + +# This ensures that a standard higher than the minimum can be passed correctly +if(NOT DEFINED CMAKE_CXX_STANDARD) + set(CMAKE_CXX_STANDARD 20) +elseif(${CMAKE_CXX_STANDARD} VERSION_LESS 20) + message(FATAL_ERROR "Cannot set a CMAKE_CXX_STANDARD smaller than 20") +endif() + +# We require a C++20 compliant compiler +set(CMAKE_CXX_STANDARD_REQUIRED ON) + +# ARROW-6848: Do not use GNU (or other CXX) extensions +set(CMAKE_CXX_EXTENSIONS OFF) + +# Build with -fPIC so that can static link our libraries into other people's +# shared libraries +set(CMAKE_POSITION_INDEPENDENT_CODE ${ARROW_POSITION_INDEPENDENT_CODE}) + +set(UNKNOWN_COMPILER_MESSAGE + "Unknown compiler: ${CMAKE_CXX_COMPILER_ID} ${CMAKE_CXX_COMPILER_VERSION}") + +# Compiler flags used when building Arrow libraries (but not tests, utilities, etc.) +set(ARROW_LIBRARIES_ONLY_CXX_FLAGS) + +# compiler flags that are common across debug/release builds +if(WIN32) + # TODO(wesm): Change usages of C runtime functions that MSVC says are + # insecure, like std::getenv + add_definitions(-D_CRT_SECURE_NO_WARNINGS) + + if(MSVC) + # ARROW-1931 See https://github.com/google/googletest/issues/1318 + # + # This is added to CMAKE_CXX_FLAGS instead of CXX_COMMON_FLAGS since only the + # former is passed into the external projects + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /D_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING") + + if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") + # clang-cl + set(CXX_COMMON_FLAGS "-EHsc") + else() + # Fix annoying D9025 warning + string(REPLACE "/W3" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") + + # Set desired warning level (e.g. set /W4 for more warnings) + # + # ARROW-2986: Without /EHsc we get C4530 warning + set(CXX_COMMON_FLAGS "/W3 /EHsc") + endif() + + # Disable C5105 (macro expansion producing 'defined' has undefined + # behavior) warning because there are codes that produce this + # warning in Windows Kits. 
e.g.: + # + # #define _CRT_INTERNAL_NONSTDC_NAMES \ + # ( \ + # ( defined _CRT_DECLARE_NONSTDC_NAMES && _CRT_DECLARE_NONSTDC_NAMES) || \ + # (!defined _CRT_DECLARE_NONSTDC_NAMES && !__STDC__ ) \ + # ) + # + # See also: + # * C5105: https://docs.microsoft.com/en-US/cpp/error-messages/compiler-warnings/c5105 + # * Related reports: + # * https://developercommunity.visualstudio.com/content/problem/387684/c5105-with-stdioh-and-experimentalpreprocessor.html + # * https://developercommunity.visualstudio.com/content/problem/1249671/stdc17-generates-warning-compiling-windowsh.html + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /wd5105") + + if(ARROW_USE_CCACHE) + foreach(c_flag + CMAKE_CXX_FLAGS + CMAKE_CXX_FLAGS_RELEASE + CMAKE_CXX_FLAGS_DEBUG + CMAKE_CXX_FLAGS_MINSIZEREL + CMAKE_CXX_FLAGS_RELWITHDEBINFO + CMAKE_C_FLAGS + CMAKE_C_FLAGS_RELEASE + CMAKE_C_FLAGS_DEBUG + CMAKE_C_FLAGS_MINSIZEREL + CMAKE_C_FLAGS_RELWITHDEBINFO) + # ccache doesn't work with /Zi. + # See also: https://github.com/ccache/ccache/issues/1040 + string(REPLACE "/Zi" "/Z7" ${c_flag} "${${c_flag}}") + endforeach() + endif() + + if(ARROW_USE_STATIC_CRT) + foreach(c_flag + CMAKE_CXX_FLAGS + CMAKE_CXX_FLAGS_RELEASE + CMAKE_CXX_FLAGS_DEBUG + CMAKE_CXX_FLAGS_MINSIZEREL + CMAKE_CXX_FLAGS_RELWITHDEBINFO + CMAKE_C_FLAGS + CMAKE_C_FLAGS_RELEASE + CMAKE_C_FLAGS_DEBUG + CMAKE_C_FLAGS_MINSIZEREL + CMAKE_C_FLAGS_RELWITHDEBINFO) + string(REPLACE "/MD" "/MT" ${c_flag} "${${c_flag}}") + endforeach() + endif() + + # Support large object code + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /bigobj") + + # We may use UTF-8 in source code such as + # cpp/src/arrow/compute/kernels/scalar_string_test.cc + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /utf-8") + else() + # MinGW + check_cxx_compiler_flag(-Wa,-mbig-obj CXX_SUPPORTS_BIG_OBJ) + if(CXX_SUPPORTS_BIG_OBJ) + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wa,-mbig-obj") + endif() + endif(MSVC) +else() + # Common flags set below with warning level + set(CXX_COMMON_FLAGS "") +endif() 
+ +# BUILD_WARNING_LEVEL add warning/error compiler flags. The possible values are +# - PRODUCTION: Build with `-Wall` but do not add `-Werror`, so warnings do not +# halt the build. +# - CHECKIN: Build with `-Wall` and `-Wextra`. Also, add `-Werror` in debug mode +# so that any important warnings fail the build. +# - EVERYTHING: Like `CHECKIN`, but possible extra flags depending on the +# compiler, including `-Wextra`, `-Weverything`, `-pedantic`. +# This is the most aggressive warning level. + +# Defaults BUILD_WARNING_LEVEL to `CHECKIN`, unless CMAKE_BUILD_TYPE is +# `RELEASE`, then it will default to `PRODUCTION`. The goal of defaulting to +# `CHECKIN` is to avoid friction with long response time from CI. +if(NOT BUILD_WARNING_LEVEL) + if("${UPPERCASE_BUILD_TYPE}" STREQUAL "RELEASE") + set(BUILD_WARNING_LEVEL PRODUCTION) + else() + set(BUILD_WARNING_LEVEL CHECKIN) + endif() +endif(NOT BUILD_WARNING_LEVEL) +string(TOUPPER ${BUILD_WARNING_LEVEL} BUILD_WARNING_LEVEL) + +message(STATUS "Arrow build warning level: ${BUILD_WARNING_LEVEL}") + +macro(arrow_add_werror_if_debug) + # Treat all compiler warnings as errors + if(MSVC) + string(APPEND CMAKE_C_FLAGS_DEBUG " /WX") + string(APPEND CMAKE_CXX_FLAGS_DEBUG " /WX") + else() + string(APPEND CMAKE_C_FLAGS_DEBUG " -Werror") + string(APPEND CMAKE_CXX_FLAGS_DEBUG " -Werror") + endif() +endmacro() + +if("${BUILD_WARNING_LEVEL}" STREQUAL "CHECKIN") + # Pre-checkin builds + if(MSVC) + # https://docs.microsoft.com/en-us/cpp/error-messages/compiler-warnings/compiler-warnings-by-compiler-version + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /W3") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /wd4365") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /wd4267") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /wd4838") + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" + OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang" + OR CMAKE_CXX_COMPILER_ID STREQUAL "IBMClang") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wall") + set(CXX_COMMON_FLAGS 
"${CXX_COMMON_FLAGS} -Wextra") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wdocumentation") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -DARROW_WARN_DOCUMENTATION") + if(CMAKE_SYSTEM_NAME STREQUAL "Emscripten") + # size_t is 32 bit in Emscripten wasm32 - ignore conversion errors + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-shorten-64-to-32") + else() + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wshorten-64-to-32") + endif() + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-missing-braces") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-unused-parameter") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-constant-logical-operand") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-return-stack-address") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wdate-time") + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wall") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-conversion") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-sign-conversion") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wdate-time") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wimplicit-fallthrough") + string(APPEND CXX_ONLY_FLAGS " -Wredundant-move") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wunused-result") + # Flag non-static functions that don't have corresponding declaration in a .h file. + # Only for Arrow libraries, since this is not a problem in tests or utilities. 
+ list(APPEND ARROW_LIBRARIES_ONLY_CXX_FLAGS "-Wmissing-declarations") + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Intel" OR CMAKE_CXX_COMPILER_ID STREQUAL + "IntelLLVM") + if(WIN32) + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /Wall") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /Wno-deprecated") + else() + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wall") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-deprecated") + endif() + else() + message(FATAL_ERROR "${UNKNOWN_COMPILER_MESSAGE}") + endif() + arrow_add_werror_if_debug() + +elseif("${BUILD_WARNING_LEVEL}" STREQUAL "EVERYTHING") + # Pedantic builds for fixing warnings + if(MSVC) + string(REPLACE "/W3" "" CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS}") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /Wall") + # https://docs.microsoft.com/en-us/cpp/build/reference/compiler-option-warning-level + # /wdnnnn disables a warning where "nnnn" is a warning number + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" OR CMAKE_CXX_COMPILER_ID STREQUAL + "Clang") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Weverything") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-c++98-compat") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-c++98-compat-pedantic") + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wall") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wpedantic") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wextra") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-unused-parameter") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wunused-result") + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Intel" OR CMAKE_CXX_COMPILER_ID STREQUAL + "IntelLLVM") + if(WIN32) + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /Wall") + else() + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wall") + endif() + else() + message(FATAL_ERROR "${UNKNOWN_COMPILER_MESSAGE}") + endif() + arrow_add_werror_if_debug() + +else() + # Production builds (warning are not treated as errors) + if(MSVC) + # 
https://docs.microsoft.com/en-us/cpp/build/reference/compiler-option-warning-level + # TODO: Enable /Wall and disable individual warnings until build compiles without errors + # /wdnnnn disables a warning where "nnnn" is a warning number + string(REPLACE "/W3" "" CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS}") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /W3") + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" + OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang" + OR CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wall") + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Intel" OR CMAKE_CXX_COMPILER_ID STREQUAL + "IntelLLVM") + if(WIN32) + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /Wall") + else() + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wall") + endif() + else() + message(FATAL_ERROR "${UNKNOWN_COMPILER_MESSAGE}") + endif() + +endif() + +if(MSVC) + # Disable annoying "performance warning" about int-to-bool conversion + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /wd4800") + + # Disable unchecked iterator warnings, equivalent to /D_SCL_SECURE_NO_WARNINGS + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /wd4996") + + # Disable "switch statement contains 'default' but no 'case' labels" warning + # (required for protobuf, see https://github.com/protocolbuffers/protobuf/issues/6885) + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} /wd4065") + +elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + if(CMAKE_CXX_COMPILER_VERSION VERSION_EQUAL "7.0" OR CMAKE_CXX_COMPILER_VERSION + VERSION_GREATER "7.0") + # Without this, gcc >= 7 warns related to changes in C++17 + set(CXX_ONLY_FLAGS "${CXX_ONLY_FLAGS} -Wno-noexcept-type") + endif() + + if(CMAKE_CXX_COMPILER_VERSION VERSION_EQUAL "13.0" OR CMAKE_CXX_COMPILER_VERSION + VERSION_GREATER "13.0") + # -Wself-move added in GCC 13 warns when a value is moved to itself + # See https://gcc.gnu.org/gcc-13/changes.html + set(CXX_ONLY_FLAGS "${CXX_ONLY_FLAGS} -Wno-self-move") + endif() + + # Disabling semantic interposition allows faster 
calling conventions + # when calling global functions internally, and can also help inlining. + # See https://stackoverflow.com/questions/35745543/new-option-in-gcc-5-3-fno-semantic-interposition + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -fno-semantic-interposition") + + # Add colors when paired with ninja + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-color=always") + + if(CMAKE_UNITY_BUILD) + # Work around issue similar to https://bugs.webkit.org/show_bug.cgi?id=176869 + set(CXX_ONLY_FLAGS "${CXX_ONLY_FLAGS} -Wno-subobject-linkage") + endif() + +elseif(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" OR CMAKE_CXX_COMPILER_ID STREQUAL + "Clang") + # Clang options for all builds + + # Using Clang with ccache causes a bunch of spurious warnings that are + # purportedly fixed in the next version of ccache. See the following for details: + # + # http://petereisentraut.blogspot.com/2011/05/ccache-and-clang.html + # http://petereisentraut.blogspot.com/2011/09/ccache-and-clang-part-2.html + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Qunused-arguments") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Qunused-arguments") + + # Avoid error when an unknown warning flag is passed + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-unknown-warning-option") + # Add colors when paired with ninja + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fcolor-diagnostics") + + # Don't complain about optimization passes that were not possible + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-pass-failed") + + if(APPLE) + # Avoid clang / libc++ error about C++17 aligned allocation on macOS. + # See https://chromium.googlesource.com/chromium/src/+/eee44569858fc650b635779c4e34be5cb0c73186%5E%21/#F0 + # for details. 
+ string(APPEND CXX_ONLY_FLAGS " -fno-aligned-new") + + if(CMAKE_HOST_SYSTEM_VERSION VERSION_LESS 20) + # Avoid C++17 std::get 'not available' issue on macOS 10.13 + # This will be required until at least R 4.4 is released and + # CRAN (hopefully) stops checking on 10.13 + string(APPEND CXX_ONLY_FLAGS " -D_LIBCPP_DISABLE_AVAILABILITY") + endif() + endif() +endif() + +# if build warning flags is set, add to CXX_COMMON_FLAGS +if(BUILD_WARNING_FLAGS) + # Use BUILD_WARNING_FLAGS with BUILD_WARNING_LEVEL=everything to disable + # warnings (use with Clang's -Weverything flag to find potential errors) + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} ${BUILD_WARNING_FLAGS}") +endif(BUILD_WARNING_FLAGS) + +# Only enable additional instruction sets if they are supported +if(ARROW_CPU_FLAG STREQUAL "x86") + if(MINGW) + # Enable _xgetbv() intrinsic to query OS support for ZMM register saves + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -mxsave") + endif() + if(ARROW_SIMD_LEVEL STREQUAL "AVX512") + if(NOT CXX_SUPPORTS_AVX512) + message(FATAL_ERROR "AVX512 required but compiler doesn't support it.") + endif() + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} ${ARROW_AVX512_FLAG}") + add_definitions(-DARROW_HAVE_AVX512 -DARROW_HAVE_AVX2 -DARROW_HAVE_BMI2 + -DARROW_HAVE_SSE4_2) + elseif(ARROW_SIMD_LEVEL STREQUAL "AVX2") + if(NOT CXX_SUPPORTS_AVX2) + message(FATAL_ERROR "AVX2 required but compiler doesn't support it.") + endif() + list(JOIN ARROW_AVX2_FLAGS " " ARROW_AVX2_FLAGS_COMMAND_LINE) + string(APPEND CXX_COMMON_FLAGS " ${ARROW_AVX2_FLAGS_COMMAND_LINE}") + add_definitions(-DARROW_HAVE_AVX2 -DARROW_HAVE_BMI2 -DARROW_HAVE_SSE4_2) + elseif(ARROW_SIMD_LEVEL STREQUAL "SSE4_2") + if(NOT CXX_SUPPORTS_SSE4_2) + message(FATAL_ERROR "SSE4.2 required but compiler doesn't support it.") + endif() + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} ${ARROW_SSE4_2_FLAG}") + add_definitions(-DARROW_HAVE_SSE4_2) + elseif(NOT ARROW_SIMD_LEVEL STREQUAL "NONE") + message(WARNING "ARROW_SIMD_LEVEL=${ARROW_SIMD_LEVEL} 
not supported by x86.") + endif() +endif() + +if(ARROW_CPU_FLAG STREQUAL "ppc") + if(CXX_SUPPORTS_ALTIVEC AND ARROW_ALTIVEC) + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} ${ARROW_ALTIVEC_FLAG}") + endif() +endif() + +if(ARROW_CPU_FLAG STREQUAL "aarch64") + if(ARROW_SIMD_LEVEL MATCHES "NEON|SVE[0-9]*") + set(ARROW_HAVE_NEON ON) + add_definitions(-DARROW_HAVE_NEON) + if(ARROW_SIMD_LEVEL MATCHES "SVE[0-9]*") + if(NOT CXX_SUPPORTS_SVE) + message(FATAL_ERROR "SVE required but compiler doesn't support it.") + endif() + # -march=armv8-a+sve + set(ARROW_ARMV8_MARCH "${ARROW_ARMV8_MARCH}+sve") + string(REGEX MATCH "[0-9]+" SVE_VECTOR_BITS ${ARROW_SIMD_LEVEL}) + if(SVE_VECTOR_BITS) + set(ARROW_HAVE_SVE${SVE_VECTOR_BITS} ON) + add_definitions(-DARROW_HAVE_SVE${SVE_VECTOR_BITS}) + # -msve-vector-bits=256 + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -msve-vector-bits=${SVE_VECTOR_BITS}") + else() + set(ARROW_HAVE_SVE_SIZELESS ON) + add_definitions(-DARROW_HAVE_SVE_SIZELESS) + endif() + endif() + set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -march=${ARROW_ARMV8_MARCH}") + elseif(NOT ARROW_SIMD_LEVEL STREQUAL "NONE") + message(WARNING "ARROW_SIMD_LEVEL=${ARROW_SIMD_LEVEL} not supported by Arm.") + endif() +endif() + +if(NOT WIN32 AND NOT APPLE) + if(ARROW_USE_MOLD) + find_program(LD_MOLD ld.mold) + if(LD_MOLD) + unset(MOLD_LINKER_FLAGS) + if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL "12.1.0") + set(MOLD_LINKER_FLAGS "-fuse-ld=mold") + else() + message(STATUS "Need GCC 12.1.0 or later to use mold linker: ${CMAKE_CXX_COMPILER_VERSION}" + ) + endif() + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") + if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL "12.0.0") + set(MOLD_LINKER_FLAGS "--ld-path=${LD_MOLD}") + else() + message(STATUS "Need clang 12.0.0 or later to use mold linker: ${CMAKE_CXX_COMPILER_VERSION}" + ) + endif() + else() + message(STATUS "Using the default linker because compiler doesn't support mold: ${CMAKE_CXX_COMPILER_ID}" 
+ ) + endif() + if(MOLD_LINKER_FLAGS) + message(STATUS "Using optional mold linker") + string(APPEND CMAKE_EXE_LINKER_FLAGS " ${MOLD_LINKER_FLAGS}") + string(APPEND CMAKE_MODULE_LINKER_FLAGS " ${MOLD_LINKER_FLAGS}") + string(APPEND CMAKE_SHARED_LINKER_FLAGS " ${MOLD_LINKER_FLAGS}") + endif() + else() + message(STATUS "Using the default linker because mold isn't found") + endif() + endif() +endif() + +if(ARROW_USE_LLD) + find_program(LD_LLD ld.lld) + if(LD_LLD) + unset(LLD_LINKER_FLAGS) + if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL "9.1.0") + set(LLD_LINKER_FLAGS "-fuse-ld=lld") + else() + message(STATUS "Need GCC 9.1.0 or later to use LLD linker: ${CMAKE_CXX_COMPILER_VERSION}" + ) + endif() + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") + if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL "12.0.0") + set(LLD_LINKER_FLAGS "--ld-path=${LD_LLD}") + else() + message(STATUS "Need clang 12.0.0 or later to use LLD linker: ${CMAKE_CXX_COMPILER_VERSION}" + ) + endif() + else() + message(STATUS "Using the default linker because compiler doesn't support LLD: ${CMAKE_CXX_COMPILER_ID}" + ) + endif() + if(LLD_LINKER_FLAGS) + message(STATUS "Using optional LLVM LLD linker") + string(APPEND CMAKE_EXE_LINKER_FLAGS " ${LLD_LINKER_FLAGS}") + string(APPEND CMAKE_MODULE_LINKER_FLAGS " ${LLD_LINKER_FLAGS}") + string(APPEND CMAKE_SHARED_LINKER_FLAGS " ${LLD_LINKER_FLAGS}") + else() + message(STATUS "Using the default linker because the LLD isn't supported") + endif() + endif() +endif() + +# compiler flags for different build types (run 'cmake -DCMAKE_BUILD_TYPE= .') +# For all builds: +# For CMAKE_BUILD_TYPE=Debug +# -ggdb: Enable gdb debugging +# For CMAKE_BUILD_TYPE=Release +# -O2 (not -O3): Enable compiler optimizations +# Debug symbols are stripped for reduced binary size. +# For CMAKE_BUILD_TYPE=RelWithDebInfo +# Same as Release, except with debug symbols enabled. 
+ +if(NOT MSVC) + set(C_RELEASE_FLAGS "") + if(CMAKE_C_FLAGS_RELEASE MATCHES "-O3") + string(APPEND C_RELEASE_FLAGS " -O2") + endif() + set(CXX_RELEASE_FLAGS "") + if(CMAKE_CXX_FLAGS_RELEASE MATCHES "-O3") + string(APPEND CXX_RELEASE_FLAGS " -O2") + endif() + set(C_RELWITHDEBINFO_FLAGS "") + if(CMAKE_C_FLAGS_RELWITHDEBINFO MATCHES "-O3") + string(APPEND C_RELWITHDEBINFO_FLAGS " -O2") + endif() + set(CXX_RELWITHDEBINFO_FLAGS "") + if(CMAKE_CXX_FLAGS_RELWITHDEBINFO MATCHES "-O3") + string(APPEND CXX_RELWITHDEBINFO_FLAGS " -O2") + endif() + if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + string(APPEND C_RELEASE_FLAGS " -ftree-vectorize") + string(APPEND CXX_RELEASE_FLAGS " -ftree-vectorize") + string(APPEND C_RELWITHDEBINFO_FLAGS " -ftree-vectorize") + string(APPEND CXX_RELWITHDEBINFO_FLAGS " -ftree-vectorize") + endif() + set(C_DEBUG_FLAGS "") + set(CXX_DEBUG_FLAGS "") + if(NOT MSVC) + if(CMAKE_SYSTEM_NAME STREQUAL "Emscripten") + # with -g it uses DWARF debug info, which is really slow to build + # on emscripten (and uses tons of memory) + string(REPLACE "-g" " " CMAKE_CXX_FLAGS_DEBUG ${CMAKE_CXX_FLAGS_DEBUG}) + string(REPLACE "-g" " " CMAKE_C_FLAGS_DEBUG ${CMAKE_C_FLAGS_DEBUG}) + string(APPEND C_DEBUG_FLAGS " -g2") + string(APPEND CXX_DEBUG_FLAGS " -g2") + string(APPEND C_RELWITHDEBINFO_FLAGS " -g2") + string(APPEND CXX_RELWITHDEBINFO_FLAGS " -g2") + # without -O1, emscripten executables are *MASSIVE*. 
Don't use -O0 + if(NOT CMAKE_C_FLAGS_DEBUG MATCHES "-O") + string(APPEND C_DEBUG_FLAGS " -O1") + endif() + if(NOT CMAKE_CXX_FLAGS_DEBUG MATCHES "-O") + string(APPEND CXX_DEBUG_FLAGS " -O1") + endif() + else() + if(NOT CMAKE_C_FLAGS_DEBUG MATCHES "-O") + string(APPEND C_DEBUG_FLAGS " -O0") + endif() + if(NOT CMAKE_CXX_FLAGS_DEBUG MATCHES "-O") + string(APPEND CXX_DEBUG_FLAGS " -O0") + endif() + + if(ARROW_GGDB_DEBUG) + string(APPEND C_DEBUG_FLAGS " -ggdb") + string(APPEND CXX_DEBUG_FLAGS " -ggdb") + string(APPEND C_RELWITHDEBINFO_FLAGS " -ggdb") + string(APPEND CXX_RELWITHDEBINFO_FLAGS " -ggdb") + endif() + endif() + endif() + + string(APPEND CMAKE_C_FLAGS_RELEASE "${C_RELEASE_FLAGS} ${ARROW_C_FLAGS_RELEASE}") + string(APPEND CMAKE_CXX_FLAGS_RELEASE "${CXX_RELEASE_FLAGS} ${ARROW_CXX_FLAGS_RELEASE}") + string(APPEND CMAKE_C_FLAGS_DEBUG "${C_DEBUG_FLAGS} ${ARROW_C_FLAGS_DEBUG}") + string(APPEND CMAKE_CXX_FLAGS_DEBUG "${CXX_DEBUG_FLAGS} ${ARROW_CXX_FLAGS_DEBUG}") + string(APPEND CMAKE_C_FLAGS_RELWITHDEBINFO + "${C_RELWITHDEBINFO_FLAGS} ${ARROW_C_FLAGS_RELWITHDEBINFO}") + string(APPEND CMAKE_CXX_FLAGS_RELWITHDEBINFO + "${CXX_RELWITHDEBINFO_FLAGS} ${ARROW_CXX_FLAGS_RELWITHDEBINFO}") +endif() + +message(STATUS "Build Type: ${CMAKE_BUILD_TYPE}") + +# ---------------------------------------------------------------------- +# MSVC-specific linker options + +if(MSVC) + set(MSVC_LINKER_FLAGS) + if(MSVC_LINK_VERBOSE) + set(MSVC_LINKER_FLAGS "${MSVC_LINKER_FLAGS} /VERBOSE:LIB") + endif() + if(NOT ARROW_USE_STATIC_CRT) + set(MSVC_LINKER_FLAGS "${MSVC_LINKER_FLAGS} /NODEFAULTLIB:LIBCMT") + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${MSVC_LINKER_FLAGS}") + set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${MSVC_LINKER_FLAGS}") + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${MSVC_LINKER_FLAGS}") + endif() +endif() + +if(CMAKE_SYSTEM_NAME STREQUAL "Emscripten") + # flags are: + # 1) We force *everything* to build as position independent + # 2) 
And with support for C++ exceptions + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fPIC -fexceptions") + # deprecated-literal-operator error is thrown in datetime (vendored lib in arrow) + set(CMAKE_CXX_FLAGS + "${CMAKE_CXX_FLAGS} -fPIC -fexceptions -Wno-error=deprecated-literal-operator") + + # flags for creating shared libraries (only used in pyarrow, because + # Emscripten builds libarrow as static) + # flags are: + # 1) Tell it to use JavaScript / WebAssembly 64 bit number support. + # 2) Tell it to build with support for C++ exceptions + # 3) Skip linker flags error which happens with -soname parameter + set(ARROW_EMSCRIPTEN_LINKER_FLAGS "-sWASM_BIGINT=1 -fexceptions -Wno-error=linkflags") + set(CMAKE_SHARED_LIBRARY_CREATE_C_FLAGS + "-sSIDE_MODULE=1 ${ARROW_EMSCRIPTEN_LINKER_FLAGS}") + set(CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS + "-sSIDE_MODULE=1 ${ARROW_EMSCRIPTEN_LINKER_FLAGS}") + set(CMAKE_SHARED_LINKER_FLAGS "-sSIDE_MODULE=1 ${ARROW_EMSCRIPTEN_LINKER_FLAGS}") + if(ARROW_TESTING) + # flags for building test executables for use in node + if("${UPPERCASE_BUILD_TYPE}" STREQUAL "RELEASE") + set(CMAKE_EXE_LINKER_FLAGS + "${ARROW_EMSCRIPTEN_LINKER_FLAGS} -sALLOW_MEMORY_GROWTH -lnodefs.js -lnoderawfs.js --pre-js ${BUILD_SUPPORT_DIR}/emscripten-test-init.js" + ) + else() + set(CMAKE_EXE_LINKER_FLAGS + "${ARROW_EMSCRIPTEN_LINKER_FLAGS} -sERROR_ON_WASM_CHANGES_AFTER_LINK=1 -sALLOW_MEMORY_GROWTH -lnodefs.js -lnoderawfs.js --pre-js ${BUILD_SUPPORT_DIR}/emscripten-test-init.js" + ) + endif() + else() + set(CMAKE_EXE_LINKER_FLAGS "${ARROW_EMSCRIPTEN_LINKER_FLAGS} -sALLOW_MEMORY_GROWTH") + endif() +endif() diff --git a/python/cmake_modules/ThirdpartyToolchain.cmake b/python/cmake_modules/ThirdpartyToolchain.cmake new file mode 100644 index 000000000000..935584c5349c --- /dev/null +++ b/python/cmake_modules/ThirdpartyToolchain.cmake @@ -0,0 +1,4128 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +include(ProcessorCount) +processorcount(NPROC) + +# Accumulate all bundled targets and we will splice them together later as +# libarrow_bundled_dependencies.a so that third party libraries have something +# usable to create statically-linked builds with some BUNDLED dependencies, +# including allocators like jemalloc and mimalloc +set(ARROW_BUNDLED_STATIC_LIBS) + +# Accumulate all system dependencies to provide suitable static link +# parameters to the third party libraries. +set(ARROW_SYSTEM_DEPENDENCIES) +set(ARROW_FLIGHT_SYSTEM_DEPENDENCIES) +set(ARROW_TESTING_SYSTEM_DEPENDENCIES) +set(PARQUET_SYSTEM_DEPENDENCIES) + +# ---------------------------------------------------------------------- +# Toolchain linkage options + +set(ARROW_RE2_LINKAGE + "static" + CACHE STRING "How to link the re2 library. static|shared (default static)") + +# ---------------------------------------------------------------------- +# Resolve the dependencies + +set(ARROW_THIRDPARTY_DEPENDENCIES + absl + AWSSDK + Azure + benchmark + Boost + Brotli + BZip2 + c-ares + gflags + glog + google_cloud_cpp_storage + gRPC + GTest + jemalloc + LLVM + lz4 + nlohmann_json + opentelemetry-cpp + orc + re2 + Protobuf + RapidJSON + Snappy + Substrait + Thrift + utf8proc + xsimd + ZLIB + zstd) + +# For backward compatibility. 
We use "BOOST_SOURCE" if "Boost_SOURCE" +# isn't specified and "BOOST_SOURCE" is specified. +# We renamed "BOOST" dependency name to "Boost" in 3.0.0 because +# upstreams (CMake and Boost) use "Boost" not "BOOST" as package name. +if("${Boost_SOURCE}" STREQUAL "" AND NOT "${BOOST_SOURCE}" STREQUAL "") + set(Boost_SOURCE ${BOOST_SOURCE}) +endif() + +# For backward compatibility. We use "RE2_SOURCE" if "re2_SOURCE" +# isn't specified and "RE2_SOURCE" is specified. +# We renamed "RE2" dependency name to "re2" in 3.0.0 because +# upstream uses "re2" not "RE2" as package name. +if("${re2_SOURCE}" STREQUAL "" AND NOT "${RE2_SOURCE}" STREQUAL "") + set(re2_SOURCE ${RE2_SOURCE}) +endif() + +# For backward compatibility. We use "ORC_SOURCE" if "orc_SOURCE" +# isn't specified and "ORC_SOURCE" is specified. +# We renamed "ORC" dependency name to "orc" in 15.0.0 because +# upstream uses "orc" not "ORC" as package name. +if("${orc_SOURCE}" STREQUAL "" AND NOT "${ORC_SOURCE}" STREQUAL "") + set(orc_SOURCE ${ORC_SOURCE}) +endif() + +# For backward compatibility. We use "RE2_ROOT" if "re2_ROOT" +# isn't specified and "RE2_ROOT" is specified. +if("${re2_ROOT}" STREQUAL "" AND NOT "${RE2_ROOT}" STREQUAL "") + set(re2_ROOT ${RE2_ROOT}) +endif() + +# For backward compatibility. We use "Lz4_SOURCE" if "lz4_SOURCE" +# isn't specified and "lz4_SOURCE" is specified. +# We renamed "Lz4" dependency name to "lz4" in 9.0.0 because +# upstream uses "lz4" not "Lz4" as package name. +if("${lz4_SOURCE}" STREQUAL "" AND NOT "${Lz4_SOURCE}" STREQUAL "") + set(lz4_SOURCE ${Lz4_SOURCE}) +endif() + +# For backward compatibility. We use "GLOG_SOURCE" if "glog_SOURCE" +# isn't specified and "GLOG_SOURCE" is specified. +# We renamed "GLOG" dependency name to "glog" in 16.0.0 because +# upstream uses "glog" not "GLOG" as package name. +if("${glog_SOURCE}" STREQUAL "" AND NOT "${GLOG_SOURCE}" STREQUAL "") + set(glog_SOURCE ${GLOG_SOURCE}) +endif() + +# For backward compatibility. 
We use bundled jemalloc by default. +if("${jemalloc_SOURCE}" STREQUAL "") + set(jemalloc_SOURCE "BUNDLED") +endif() + +message(STATUS "Using ${ARROW_DEPENDENCY_SOURCE} approach to find dependencies") + +if(ARROW_DEPENDENCY_SOURCE STREQUAL "CONDA") + if(MSVC) + set(ARROW_PACKAGE_PREFIX "$ENV{CONDA_PREFIX}/Library") + else() + set(ARROW_PACKAGE_PREFIX $ENV{CONDA_PREFIX}) + endif() + set(ARROW_ACTUAL_DEPENDENCY_SOURCE "SYSTEM") + # GoogleTest provided by conda can't be used on macOS because it's + # built with C++14. So we accept auto fallback only for GoogleTest. + if("${GTest_SOURCE}" STREQUAL "") + set(GTest_SOURCE "AUTO") + endif() + message(STATUS "Using CONDA_PREFIX for ARROW_PACKAGE_PREFIX: ${ARROW_PACKAGE_PREFIX}") +else() + set(ARROW_ACTUAL_DEPENDENCY_SOURCE "${ARROW_DEPENDENCY_SOURCE}") +endif() + +if(ARROW_PACKAGE_PREFIX) + message(STATUS "Setting (unset) dependency *_ROOT variables: ${ARROW_PACKAGE_PREFIX}") + set(ENV{PKG_CONFIG_PATH} "${ARROW_PACKAGE_PREFIX}/lib/pkgconfig/") + + if(NOT DEFINED ENV{BOOST_ROOT}) + set(ENV{BOOST_ROOT} ${ARROW_PACKAGE_PREFIX}) + endif() + if(NOT DEFINED ENV{Boost_ROOT}) + set(ENV{Boost_ROOT} ${ARROW_PACKAGE_PREFIX}) + endif() + if(NOT DEFINED OPENSSL_ROOT_DIR) + set(OPENSSL_ROOT_DIR ${ARROW_PACKAGE_PREFIX}) + endif() +endif() + +# For each dependency, set dependency source to global default, if unset +foreach(DEPENDENCY ${ARROW_THIRDPARTY_DEPENDENCIES}) + if("${${DEPENDENCY}_SOURCE}" STREQUAL "") + set(${DEPENDENCY}_SOURCE ${ARROW_ACTUAL_DEPENDENCY_SOURCE}) + # If no ROOT was supplied and we have a global prefix, use it + if(NOT ${DEPENDENCY}_ROOT AND ARROW_PACKAGE_PREFIX) + set(${DEPENDENCY}_ROOT ${ARROW_PACKAGE_PREFIX}) + endif() + endif() +endforeach() + +macro(build_dependency DEPENDENCY_NAME) + if("${DEPENDENCY_NAME}" STREQUAL "absl") + build_absl() + elseif("${DEPENDENCY_NAME}" STREQUAL "AWSSDK") + build_awssdk() + elseif("${DEPENDENCY_NAME}" STREQUAL "Azure") + build_azure_sdk() + elseif("${DEPENDENCY_NAME}" STREQUAL "benchmark") + 
build_benchmark() + elseif("${DEPENDENCY_NAME}" STREQUAL "Boost") + build_boost() + elseif("${DEPENDENCY_NAME}" STREQUAL "Brotli") + build_brotli() + elseif("${DEPENDENCY_NAME}" STREQUAL "BZip2") + build_bzip2() + elseif("${DEPENDENCY_NAME}" STREQUAL "c-ares") + build_cares() + elseif("${DEPENDENCY_NAME}" STREQUAL "gflags") + build_gflags() + elseif("${DEPENDENCY_NAME}" STREQUAL "glog") + build_glog() + elseif("${DEPENDENCY_NAME}" STREQUAL "google_cloud_cpp_storage") + build_google_cloud_cpp_storage() + elseif("${DEPENDENCY_NAME}" STREQUAL "gRPC") + build_grpc() + elseif("${DEPENDENCY_NAME}" STREQUAL "GTest") + build_gtest() + elseif("${DEPENDENCY_NAME}" STREQUAL "jemalloc") + build_jemalloc() + elseif("${DEPENDENCY_NAME}" STREQUAL "lz4") + build_lz4() + elseif("${DEPENDENCY_NAME}" STREQUAL "nlohmann_json") + build_nlohmann_json() + elseif("${DEPENDENCY_NAME}" STREQUAL "opentelemetry-cpp") + build_opentelemetry() + elseif("${DEPENDENCY_NAME}" STREQUAL "orc") + build_orc() + elseif("${DEPENDENCY_NAME}" STREQUAL "Protobuf") + build_protobuf() + elseif("${DEPENDENCY_NAME}" STREQUAL "RapidJSON") + build_rapidjson() + elseif("${DEPENDENCY_NAME}" STREQUAL "re2") + build_re2() + elseif("${DEPENDENCY_NAME}" STREQUAL "Snappy") + build_snappy() + elseif("${DEPENDENCY_NAME}" STREQUAL "Substrait") + build_substrait() + elseif("${DEPENDENCY_NAME}" STREQUAL "Thrift") + build_thrift() + elseif("${DEPENDENCY_NAME}" STREQUAL "utf8proc") + build_utf8proc() + elseif("${DEPENDENCY_NAME}" STREQUAL "xsimd") + build_xsimd() + elseif("${DEPENDENCY_NAME}" STREQUAL "ZLIB") + build_zlib() + elseif("${DEPENDENCY_NAME}" STREQUAL "zstd") + build_zstd() + else() + message(FATAL_ERROR "Unknown thirdparty dependency to build: ${DEPENDENCY_NAME}") + endif() +endmacro() + +function(provide_cmake_module MODULE_NAME ARROW_CMAKE_PACKAGE_NAME) + set(module "${CMAKE_SOURCE_DIR}/cmake_modules/${MODULE_NAME}.cmake") + if(EXISTS "${module}") + message(STATUS "Providing CMake module for ${MODULE_NAME} as 
part of ${ARROW_CMAKE_PACKAGE_NAME} CMake package" + ) + install(FILES "${module}" + DESTINATION "${ARROW_CMAKE_DIR}/${ARROW_CMAKE_PACKAGE_NAME}") + endif() +endfunction() + +# Find modules are needed by the consumer in case of a static build, or if the +# linkage is PUBLIC or INTERFACE. +function(provide_find_module PACKAGE_NAME ARROW_CMAKE_PACKAGE_NAME) + provide_cmake_module("Find${PACKAGE_NAME}" ${ARROW_CMAKE_PACKAGE_NAME}) +endfunction() + +macro(resolve_dependency DEPENDENCY_NAME) + set(options) + set(one_value_args + ARROW_CMAKE_PACKAGE_NAME + ARROW_PC_PACKAGE_NAME + FORCE_ANY_NEWER_VERSION + HAVE_ALT + IS_RUNTIME_DEPENDENCY + REQUIRED_VERSION + USE_CONFIG) + set(multi_value_args COMPONENTS OPTIONAL_COMPONENTS PC_PACKAGE_NAMES) + cmake_parse_arguments(ARG + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN}) + if(ARG_UNPARSED_ARGUMENTS) + message(SEND_ERROR "Error: unrecognized arguments: ${ARG_UNPARSED_ARGUMENTS}") + endif() + if("${ARG_IS_RUNTIME_DEPENDENCY}" STREQUAL "") + set(ARG_IS_RUNTIME_DEPENDENCY TRUE) + endif() + + if(ARG_HAVE_ALT) + set(PACKAGE_NAME "${DEPENDENCY_NAME}Alt") + else() + set(PACKAGE_NAME ${DEPENDENCY_NAME}) + endif() + set(FIND_PACKAGE_ARGUMENTS ${PACKAGE_NAME}) + if(ARG_REQUIRED_VERSION AND NOT ARG_FORCE_ANY_NEWER_VERSION) + list(APPEND FIND_PACKAGE_ARGUMENTS ${ARG_REQUIRED_VERSION}) + endif() + if(ARG_USE_CONFIG) + list(APPEND FIND_PACKAGE_ARGUMENTS CONFIG) + endif() + if(ARG_COMPONENTS) + list(APPEND FIND_PACKAGE_ARGUMENTS COMPONENTS ${ARG_COMPONENTS}) + endif() + if(ARG_OPTIONAL_COMPONENTS) + list(APPEND FIND_PACKAGE_ARGUMENTS OPTIONAL_COMPONENTS ${ARG_OPTIONAL_COMPONENTS}) + endif() + if(${DEPENDENCY_NAME}_SOURCE STREQUAL "AUTO") + find_package(${FIND_PACKAGE_ARGUMENTS}) + set(COMPATIBLE ${${PACKAGE_NAME}_FOUND}) + if(COMPATIBLE + AND ARG_FORCE_ANY_NEWER_VERSION + AND ARG_REQUIRED_VERSION) + if(${${PACKAGE_NAME}_VERSION} VERSION_LESS ${ARG_REQUIRED_VERSION}) + message(DEBUG "Couldn't find ${DEPENDENCY_NAME} >= 
${ARG_REQUIRED_VERSION}") + set(COMPATIBLE FALSE) + endif() + endif() + if(COMPATIBLE) + set(${DEPENDENCY_NAME}_SOURCE "SYSTEM") + else() + build_dependency(${DEPENDENCY_NAME}) + set(${DEPENDENCY_NAME}_SOURCE "BUNDLED") + endif() + elseif(${DEPENDENCY_NAME}_SOURCE STREQUAL "BUNDLED") + build_dependency(${DEPENDENCY_NAME}) + elseif(${DEPENDENCY_NAME}_SOURCE STREQUAL "SYSTEM") + find_package(${FIND_PACKAGE_ARGUMENTS} REQUIRED) + if(ARG_FORCE_ANY_NEWER_VERSION AND ARG_REQUIRED_VERSION) + if(${${PACKAGE_NAME}_VERSION} VERSION_LESS ${ARG_REQUIRED_VERSION}) + message(FATAL_ERROR "Couldn't find ${DEPENDENCY_NAME} >= ${ARG_REQUIRED_VERSION}") + endif() + endif() + endif() + if(${DEPENDENCY_NAME}_SOURCE STREQUAL "SYSTEM" AND ARG_IS_RUNTIME_DEPENDENCY) + if(NOT ARG_ARROW_CMAKE_PACKAGE_NAME) + set(ARG_ARROW_CMAKE_PACKAGE_NAME "Arrow") + endif() + # ArrowFlight -> _Arrow_Flight + string(REGEX REPLACE "([A-Z])" "_\\1" ARG_ARROW_CMAKE_PACKAGE_NAME_SNAKE + ${ARG_ARROW_CMAKE_PACKAGE_NAME}) + # _Arrow_Flight -> Arrow_Flight + string(SUBSTRING ${ARG_ARROW_CMAKE_PACKAGE_NAME_SNAKE} 1 -1 + ARG_ARROW_CMAKE_PACKAGE_NAME_SNAKE) + # Arrow_Flight -> ARROW_FLIGHT + string(TOUPPER ${ARG_ARROW_CMAKE_PACKAGE_NAME_SNAKE} + ARG_ARROW_CMAKE_PACKAGE_NAME_UPPER_SNAKE) + provide_find_module(${PACKAGE_NAME} ${ARG_ARROW_CMAKE_PACKAGE_NAME}) + list(APPEND ${ARG_ARROW_CMAKE_PACKAGE_NAME_UPPER_SNAKE}_SYSTEM_DEPENDENCIES + ${PACKAGE_NAME}) + if(NOT ARG_ARROW_PC_PACKAGE_NAME) + set(ARG_ARROW_PC_PACKAGE_NAME "arrow") + endif() + # arrow-flight -> arrow_flight + string(REPLACE "-" "_" ARG_ARROW_PC_PACKAGE_NAME_SNAKE ${ARG_ARROW_PC_PACKAGE_NAME}) + # arrow_flight -> ARROW_FLIGHT + string(TOUPPER ${ARG_ARROW_PC_PACKAGE_NAME_SNAKE} + ARG_ARROW_PC_PACKAGE_NAME_UPPER_SNAKE) + if(ARROW_BUILD_STATIC) + find_package(PkgConfig QUIET) + foreach(ARG_PC_PACKAGE_NAME ${ARG_PC_PACKAGE_NAMES}) + pkg_check_modules(${ARG_PC_PACKAGE_NAME}_PC + ${ARG_PC_PACKAGE_NAME} + NO_CMAKE_PATH + NO_CMAKE_ENVIRONMENT_PATH + QUIET) + 
set(RESOLVE_DEPENDENCY_PC_PACKAGE + "pkg-config package for ${ARG_PC_PACKAGE_NAME} ") + string(APPEND RESOLVE_DEPENDENCY_PC_PACKAGE + "that is used by ${ARG_ARROW_PC_PACKAGE_NAME} for static link") + if(${${ARG_PC_PACKAGE_NAME}_PC_FOUND}) + message(STATUS "Using ${RESOLVE_DEPENDENCY_PC_PACKAGE}") + string(APPEND ${ARG_ARROW_PC_PACKAGE_NAME_UPPER_SNAKE}_PC_REQUIRES_PRIVATE + " ${ARG_PC_PACKAGE_NAME}") + else() + message(STATUS "${RESOLVE_DEPENDENCY_PC_PACKAGE} isn't found") + endif() + endforeach() + endif() + endif() +endmacro() + +# ---------------------------------------------------------------------- +# Thirdparty versions, environment variables, source URLs + +set(THIRDPARTY_DIR "${arrow_SOURCE_DIR}/thirdparty") + +add_library(arrow::flatbuffers INTERFACE IMPORTED) +target_include_directories(arrow::flatbuffers + INTERFACE "${THIRDPARTY_DIR}/flatbuffers/include") + +# ---------------------------------------------------------------------- +# Some EP's require other EP's + +if(ARROW_WITH_OPENTELEMETRY) + set(ARROW_WITH_NLOHMANN_JSON ON) + set(ARROW_WITH_PROTOBUF ON) +endif() + +if(ARROW_PARQUET) + set(ARROW_WITH_RAPIDJSON ON) + set(ARROW_WITH_THRIFT ON) +endif() + +if(ARROW_WITH_THRIFT) + set(ARROW_WITH_ZLIB ON) +endif() + +if(ARROW_FLIGHT) + set(ARROW_WITH_GRPC ON) +endif() + +if(ARROW_WITH_GRPC) + set(ARROW_WITH_RE2 ON) + set(ARROW_WITH_ZLIB ON) +endif() + +if(ARROW_GCS) + set(ARROW_WITH_GOOGLE_CLOUD_CPP ON) + set(ARROW_WITH_NLOHMANN_JSON ON) + set(ARROW_WITH_ZLIB ON) +endif() + +if(ARROW_AZURE) + set(ARROW_WITH_AZURE_SDK ON) +endif() + +if(ARROW_JSON OR ARROW_FLIGHT_SQL_ODBC) + set(ARROW_WITH_RAPIDJSON ON) +endif() + +if(ARROW_ORC OR ARROW_FLIGHT) + set(ARROW_WITH_PROTOBUF ON) +endif() + +if(ARROW_SUBSTRAIT) + set(ARROW_WITH_PROTOBUF ON) +endif() + +if(ARROW_S3) + set(ARROW_WITH_ZLIB ON) +endif() + +if((NOT ARROW_COMPUTE) AND (NOT ARROW_GANDIVA)) + set(ARROW_WITH_UTF8PROC OFF) +endif() + +if((NOT ARROW_COMPUTE) + AND (NOT ARROW_GANDIVA) + AND (NOT 
ARROW_WITH_GRPC)) + set(ARROW_WITH_RE2 OFF) +endif() + +# ---------------------------------------------------------------------- +# Versions and URLs for toolchain builds, which also can be used to configure +# offline builds +# Note: We should not use the Apache dist server for build dependencies + +macro(set_urls URLS) + set(${URLS} ${ARGN}) +endmacro() + +# Read toolchain versions from cpp/thirdparty/versions.txt +file(STRINGS "${THIRDPARTY_DIR}/versions.txt" TOOLCHAIN_VERSIONS_TXT) +foreach(_VERSION_ENTRY ${TOOLCHAIN_VERSIONS_TXT}) + # Exclude comments + if(NOT ((_VERSION_ENTRY MATCHES "^[^#][A-Za-z0-9-_]+_VERSION=") + OR (_VERSION_ENTRY MATCHES "^[^#][A-Za-z0-9-_]+_CHECKSUM="))) + continue() + endif() + + string(REGEX MATCH "^[^=]*" _VARIABLE_NAME ${_VERSION_ENTRY}) + string(REPLACE "${_VARIABLE_NAME}=" "" _VARIABLE_VALUE ${_VERSION_ENTRY}) + + # Skip blank or malformed lines + if(_VARIABLE_VALUE STREQUAL "") + continue() + endif() + + # For debugging + message(STATUS "${_VARIABLE_NAME}: ${_VARIABLE_VALUE}") + + set(${_VARIABLE_NAME} ${_VARIABLE_VALUE}) +endforeach() + +set(THIRDPARTY_MIRROR_URL "https://apache.jfrog.io/artifactory/arrow/thirdparty/7.0.0") + +if(DEFINED ENV{ARROW_ABSL_URL}) + set(ABSL_SOURCE_URL "$ENV{ARROW_ABSL_URL}") +else() + set_urls(ABSL_SOURCE_URL + "https://github.com/abseil/abseil-cpp/archive/${ARROW_ABSL_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_C_AUTH_URL}) + set(AWS_C_AUTH_SOURCE_URL "$ENV{ARROW_AWS_C_AUTH_URL}") +else() + set_urls(AWS_C_AUTH_SOURCE_URL + "https://github.com/awslabs/aws-c-auth/archive/${ARROW_AWS_C_AUTH_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_C_CAL_URL}) + set(AWS_C_CAL_SOURCE_URL "$ENV{ARROW_AWS_C_CAL_URL}") +else() + set_urls(AWS_C_CAL_SOURCE_URL + "https://github.com/awslabs/aws-c-cal/archive/${ARROW_AWS_C_CAL_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_C_COMMON_URL}) + set(AWS_C_COMMON_SOURCE_URL "$ENV{ARROW_AWS_C_COMMON_URL}") +else() + 
set_urls(AWS_C_COMMON_SOURCE_URL + "https://github.com/awslabs/aws-c-common/archive/${ARROW_AWS_C_COMMON_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_C_COMPRESSION_URL}) + set(AWS_C_COMPRESSION_SOURCE_URL "$ENV{ARROW_AWS_C_COMPRESSION_URL}") +else() + set_urls(AWS_C_COMPRESSION_SOURCE_URL + "https://github.com/awslabs/aws-c-compression/archive/${ARROW_AWS_C_COMPRESSION_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_C_EVENT_STREAM_URL}) + set(AWS_C_EVENT_STREAM_SOURCE_URL "$ENV{ARROW_AWS_C_EVENT_STREAM_URL}") +else() + set_urls(AWS_C_EVENT_STREAM_SOURCE_URL + "https://github.com/awslabs/aws-c-event-stream/archive/${ARROW_AWS_C_EVENT_STREAM_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_C_HTTP_URL}) + set(AWS_C_HTTP_SOURCE_URL "$ENV{ARROW_AWS_C_HTTP_URL}") +else() + set_urls(AWS_C_HTTP_SOURCE_URL + "https://github.com/awslabs/aws-c-http/archive/${ARROW_AWS_C_HTTP_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_C_IO_URL}) + set(AWS_C_IO_SOURCE_URL "$ENV{ARROW_AWS_C_IO_URL}") +else() + set_urls(AWS_C_IO_SOURCE_URL + "https://github.com/awslabs/aws-c-io/archive/${ARROW_AWS_C_IO_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_C_MQTT_URL}) + set(AWS_C_MQTT_SOURCE_URL "$ENV{ARROW_AWS_C_MQTT_URL}") +else() + set_urls(AWS_C_MQTT_SOURCE_URL + "https://github.com/awslabs/aws-c-mqtt/archive/${ARROW_AWS_C_MQTT_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_C_S3_URL}) + set(AWS_C_S3_SOURCE_URL "$ENV{ARROW_AWS_C_S3_URL}") +else() + set_urls(AWS_C_S3_SOURCE_URL + "https://github.com/awslabs/aws-c-s3/archive/${ARROW_AWS_C_S3_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_C_SDKUTILS_URL}) + set(AWS_C_SDKUTILS_SOURCE_URL "$ENV{ARROW_AWS_C_SDKUTILS_URL}") +else() + set_urls(AWS_C_SDKUTILS_SOURCE_URL + "https://github.com/awslabs/aws-c-sdkutils/archive/${ARROW_AWS_C_SDKUTILS_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_CHECKSUMS_URL}) + 
set(AWS_CHECKSUMS_SOURCE_URL "$ENV{ARROW_AWS_CHECKSUMS_URL}") +else() + set_urls(AWS_CHECKSUMS_SOURCE_URL + "https://github.com/awslabs/aws-checksums/archive/${ARROW_AWS_CHECKSUMS_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_CRT_CPP_URL}) + set(AWS_CRT_CPP_SOURCE_URL "$ENV{ARROW_AWS_CRT_CPP_URL}") +else() + set_urls(AWS_CRT_CPP_SOURCE_URL + "https://github.com/awslabs/aws-crt-cpp/archive/${ARROW_AWS_CRT_CPP_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWS_LC_URL}) + set(AWS_LC_SOURCE_URL "$ENV{ARROW_AWS_LC_URL}") +else() + set_urls(AWS_LC_SOURCE_URL + "https://github.com/awslabs/aws-lc/archive/${ARROW_AWS_LC_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_AWSSDK_URL}) + set(AWSSDK_SOURCE_URL "$ENV{ARROW_AWSSDK_URL}") +else() + set_urls(AWSSDK_SOURCE_URL + "https://github.com/aws/aws-sdk-cpp/archive/${ARROW_AWSSDK_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/aws-sdk-cpp-${ARROW_AWSSDK_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_AZURE_SDK_URL}) + set(ARROW_AZURE_SDK_URL "$ENV{ARROW_AZURE_SDK_URL}") +else() + set_urls(ARROW_AZURE_SDK_URL + "https://github.com/Azure/azure-sdk-for-cpp/archive/${ARROW_AZURE_SDK_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_BOOST_URL}) + set(BOOST_SOURCE_URL "$ENV{ARROW_BOOST_URL}") +else() + set_urls(BOOST_SOURCE_URL + "https://github.com/boostorg/boost/releases/download/boost-${ARROW_BOOST_BUILD_VERSION}/boost-${ARROW_BOOST_BUILD_VERSION}-cmake.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_BROTLI_URL}) + set(BROTLI_SOURCE_URL "$ENV{ARROW_BROTLI_URL}") +else() + set_urls(BROTLI_SOURCE_URL + "https://github.com/google/brotli/archive/${ARROW_BROTLI_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/brotli-${ARROW_BROTLI_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_BZIP2_URL}) + set(ARROW_BZIP2_SOURCE_URL "$ENV{ARROW_BZIP2_URL}") +else() + set_urls(ARROW_BZIP2_SOURCE_URL + "https://sourceware.org/pub/bzip2/bzip2-${ARROW_BZIP2_BUILD_VERSION}.tar.gz" + 
"${THIRDPARTY_MIRROR_URL}/bzip2-${ARROW_BZIP2_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_CARES_URL}) + set(CARES_SOURCE_URL "$ENV{ARROW_CARES_URL}") +else() + string(REPLACE "." "_" ARROW_CARES_BUILD_VERSION_UNDERSCORES + ${ARROW_CARES_BUILD_VERSION}) + set_urls(CARES_SOURCE_URL + "https://github.com/c-ares/c-ares/releases/download/cares-${ARROW_CARES_BUILD_VERSION_UNDERSCORES}/c-ares-${ARROW_CARES_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/cares-${ARROW_CARES_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_CRC32C_URL}) + set(CRC32C_SOURCE_URL "$ENV{ARROW_CRC32C_URL}") +else() + set_urls(CRC32C_SOURCE_URL + "https://github.com/google/crc32c/archive/${ARROW_CRC32C_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_GBENCHMARK_URL}) + set(GBENCHMARK_SOURCE_URL "$ENV{ARROW_GBENCHMARK_URL}") +else() + set_urls(GBENCHMARK_SOURCE_URL + "https://github.com/google/benchmark/archive/${ARROW_GBENCHMARK_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/gbenchmark-${ARROW_GBENCHMARK_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_GFLAGS_URL}) + set(GFLAGS_SOURCE_URL "$ENV{ARROW_GFLAGS_URL}") +else() + set_urls(GFLAGS_SOURCE_URL + "https://github.com/gflags/gflags/archive/${ARROW_GFLAGS_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/gflags-${ARROW_GFLAGS_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_GLOG_URL}) + set(GLOG_SOURCE_URL "$ENV{ARROW_GLOG_URL}") +else() + set_urls(GLOG_SOURCE_URL + "https://github.com/google/glog/archive/${ARROW_GLOG_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/glog-${ARROW_GLOG_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_GOOGLE_CLOUD_CPP_URL}) + set(google_cloud_cpp_storage_SOURCE_URL "$ENV{ARROW_GOOGLE_CLOUD_CPP_URL}") +else() + set_urls(google_cloud_cpp_storage_SOURCE_URL + "https://github.com/googleapis/google-cloud-cpp/archive/${ARROW_GOOGLE_CLOUD_CPP_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/google-cloud-cpp-${ARROW_GOOGLE_CLOUD_CPP_BUILD_VERSION}.tar.gz" 
+ ) +endif() + +if(DEFINED ENV{ARROW_GRPC_URL}) + set(GRPC_SOURCE_URL "$ENV{ARROW_GRPC_URL}") +else() + set_urls(GRPC_SOURCE_URL + "https://github.com/grpc/grpc/archive/${ARROW_GRPC_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/grpc-${ARROW_GRPC_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_GTEST_URL}) + set(GTEST_SOURCE_URL "$ENV{ARROW_GTEST_URL}") +else() + set_urls(GTEST_SOURCE_URL + "https://github.com/google/googletest/releases/download/v${ARROW_GTEST_BUILD_VERSION}/googletest-${ARROW_GTEST_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/gtest-${ARROW_GTEST_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_JEMALLOC_URL}) + set(JEMALLOC_SOURCE_URL "$ENV{ARROW_JEMALLOC_URL}") +else() + set_urls(JEMALLOC_SOURCE_URL + "https://github.com/jemalloc/jemalloc/releases/download/${ARROW_JEMALLOC_BUILD_VERSION}/jemalloc-${ARROW_JEMALLOC_BUILD_VERSION}.tar.bz2" + "${THIRDPARTY_MIRROR_URL}/jemalloc-${ARROW_JEMALLOC_BUILD_VERSION}.tar.bz2") +endif() + +if(DEFINED ENV{ARROW_MIMALLOC_URL}) + set(MIMALLOC_SOURCE_URL "$ENV{ARROW_MIMALLOC_URL}") +else() + set_urls(MIMALLOC_SOURCE_URL + "https://github.com/microsoft/mimalloc/archive/${ARROW_MIMALLOC_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/mimalloc-${ARROW_MIMALLOC_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_NLOHMANN_JSON_URL}) + set(NLOHMANN_JSON_SOURCE_URL "$ENV{ARROW_NLOHMANN_JSON_URL}") +else() + set_urls(NLOHMANN_JSON_SOURCE_URL + "https://github.com/nlohmann/json/archive/${ARROW_NLOHMANN_JSON_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_LZ4_URL}) + set(LZ4_SOURCE_URL "$ENV{ARROW_LZ4_URL}") +else() + set_urls(LZ4_SOURCE_URL + "https://github.com/lz4/lz4/archive/${ARROW_LZ4_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/lz4-${ARROW_LZ4_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_ORC_URL}) + set(ORC_SOURCE_URL "$ENV{ARROW_ORC_URL}") +else() + set_urls(ORC_SOURCE_URL + 
"https://www.apache.org/dyn/closer.lua/orc/orc-${ARROW_ORC_BUILD_VERSION}/orc-${ARROW_ORC_BUILD_VERSION}.tar.gz?action=download" + "https://dlcdn.apache.org/orc/orc-${ARROW_ORC_BUILD_VERSION}/orc-${ARROW_ORC_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_OPENTELEMETRY_URL}) + set(OPENTELEMETRY_SOURCE_URL "$ENV{ARROW_OPENTELEMETRY_URL}") +else() + # TODO: add mirror + set_urls(OPENTELEMETRY_SOURCE_URL + "https://github.com/open-telemetry/opentelemetry-cpp/archive/refs/tags/${ARROW_OPENTELEMETRY_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_OPENTELEMETRY_PROTO_URL}) + set(OPENTELEMETRY_PROTO_SOURCE_URL "$ENV{ARROW_OPENTELEMETRY_PROTO_URL}") +else() + # TODO: add mirror + # N.B. upstream pins to particular commits, not tags + set_urls(OPENTELEMETRY_PROTO_SOURCE_URL + "https://github.com/open-telemetry/opentelemetry-proto/archive/${ARROW_OPENTELEMETRY_PROTO_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_PROTOBUF_URL}) + set(PROTOBUF_SOURCE_URL "$ENV{ARROW_PROTOBUF_URL}") +else() + string(SUBSTRING ${ARROW_PROTOBUF_BUILD_VERSION} 1 -1 + ARROW_PROTOBUF_STRIPPED_BUILD_VERSION) + # strip the leading `v` + set_urls(PROTOBUF_SOURCE_URL + "https://github.com/protocolbuffers/protobuf/releases/download/${ARROW_PROTOBUF_BUILD_VERSION}/protobuf-all-${ARROW_PROTOBUF_STRIPPED_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/protobuf-${ARROW_PROTOBUF_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_RE2_URL}) + set(RE2_SOURCE_URL "$ENV{ARROW_RE2_URL}") +else() + set_urls(RE2_SOURCE_URL + "https://github.com/google/re2/archive/${ARROW_RE2_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/re2-${ARROW_RE2_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_RAPIDJSON_URL}) + set(RAPIDJSON_SOURCE_URL "$ENV{ARROW_RAPIDJSON_URL}") +else() + set_urls(RAPIDJSON_SOURCE_URL + "https://github.com/miloyip/rapidjson/archive/${ARROW_RAPIDJSON_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/rapidjson-${ARROW_RAPIDJSON_BUILD_VERSION}.tar.gz") 
+endif() + +if(DEFINED ENV{ARROW_S2N_TLS_URL}) + set(S2N_TLS_SOURCE_URL "$ENV{ARROW_S2N_TLS_URL}") +else() + set_urls(S2N_TLS_SOURCE_URL + "https://github.com/aws/s2n-tls/archive/${ARROW_S2N_TLS_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_SNAPPY_URL}) + set(SNAPPY_SOURCE_URL "$ENV{ARROW_SNAPPY_URL}") +else() + set_urls(SNAPPY_SOURCE_URL + "https://github.com/google/snappy/archive/${ARROW_SNAPPY_BUILD_VERSION}.tar.gz" + "${THIRDPARTY_MIRROR_URL}/snappy-${ARROW_SNAPPY_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_SUBSTRAIT_URL}) + set(SUBSTRAIT_SOURCE_URL "$ENV{ARROW_SUBSTRAIT_URL}") +else() + set_urls(SUBSTRAIT_SOURCE_URL + "https://github.com/substrait-io/substrait/archive/${ARROW_SUBSTRAIT_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_THRIFT_URL}) + set(THRIFT_SOURCE_URL "$ENV{ARROW_THRIFT_URL}") +else() + set(THRIFT_SOURCE_URL + "https://www.apache.org/dyn/closer.lua/thrift/${ARROW_THRIFT_BUILD_VERSION}/thrift-${ARROW_THRIFT_BUILD_VERSION}.tar.gz?action=download" + "https://dlcdn.apache.org/thrift/${ARROW_THRIFT_BUILD_VERSION}/thrift-${ARROW_THRIFT_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_UTF8PROC_URL}) + set(ARROW_UTF8PROC_SOURCE_URL "$ENV{ARROW_UTF8PROC_URL}") +else() + set_urls(ARROW_UTF8PROC_SOURCE_URL + "https://github.com/JuliaStrings/utf8proc/archive/${ARROW_UTF8PROC_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_WIL_URL}) + set(ARROW_WIL_URL "$ENV{ARROW_WIL_URL}") +else() + set_urls(ARROW_WIL_URL + "https://github.com/microsoft/wil/archive/${ARROW_WIL_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_XSIMD_URL}) + set(XSIMD_SOURCE_URL "$ENV{ARROW_XSIMD_URL}") +else() + set_urls(XSIMD_SOURCE_URL + "https://github.com/xtensor-stack/xsimd/archive/${ARROW_XSIMD_BUILD_VERSION}.tar.gz" + ) +endif() + +if(DEFINED ENV{ARROW_ZLIB_URL}) + set(ZLIB_SOURCE_URL "$ENV{ARROW_ZLIB_URL}") +else() + set_urls(ZLIB_SOURCE_URL + "https://zlib.net/fossils/zlib-${ARROW_ZLIB_BUILD_VERSION}.tar.gz" + 
"${THIRDPARTY_MIRROR_URL}/zlib-${ARROW_ZLIB_BUILD_VERSION}.tar.gz") +endif() + +if(DEFINED ENV{ARROW_ZSTD_URL}) + set(ZSTD_SOURCE_URL "$ENV{ARROW_ZSTD_URL}") +else() + set_urls(ZSTD_SOURCE_URL + "https://github.com/facebook/zstd/releases/download/v${ARROW_ZSTD_BUILD_VERSION}/zstd-${ARROW_ZSTD_BUILD_VERSION}.tar.gz" + ) +endif() + +# ---------------------------------------------------------------------- +# ExternalProject options + +set(EP_LIST_SEPARATOR "|") +set(EP_COMMON_OPTIONS LIST_SEPARATOR ${EP_LIST_SEPARATOR}) + +set(EP_CXX_FLAGS "${CMAKE_CXX_FLAGS}") +set(EP_C_FLAGS "${CMAKE_C_FLAGS}") +if(NOT MSVC_TOOLCHAIN) + # Set -fPIC on all external projects + string(APPEND EP_CXX_FLAGS " -fPIC") + string(APPEND EP_C_FLAGS " -fPIC") +endif() + +# We pass MSVC runtime related options via +# CMAKE_${LANG}_FLAGS_${CONFIG} explicitly because external projects +# may not require CMake 3.15 or later. If an external project doesn't +# require CMake 3.15 or later, CMAKE_MSVC_RUNTIME_LIBRARY is ignored. +# If CMAKE_MSVC_RUNTIME_LIBRARY is ignored, an external project may +# use a different MSVC runtime. For example, Apache Arrow C++ uses /MTd +# (multi threaded debug) but an external project uses /MT (multi +# threaded release). It causes a link error. 
+foreach(CONFIG DEBUG MINSIZEREL RELEASE RELWITHDEBINFO) + set(EP_CXX_FLAGS_${CONFIG} "${CMAKE_CXX_FLAGS_${CONFIG}}") + set(EP_C_FLAGS_${CONFIG} "${CMAKE_C_FLAGS_${CONFIG}}") + if(CONFIG STREQUAL DEBUG) + set(EP_MSVC_RUNTIME_LIBRARY MultiThreadedDebugDLL) + else() + set(EP_MSVC_RUNTIME_LIBRARY MultiThreadedDLL) + endif() + string(APPEND EP_CXX_FLAGS_${CONFIG} + " ${CMAKE_CXX_COMPILE_OPTIONS_MSVC_RUNTIME_LIBRARY_${EP_MSVC_RUNTIME_LIBRARY}}") + string(APPEND EP_C_FLAGS_${CONFIG} + " ${CMAKE_C_COMPILE_OPTIONS_MSVC_RUNTIME_LIBRARY_${EP_MSVC_RUNTIME_LIBRARY}}") +endforeach() +if(MSVC_TOOLCHAIN) + string(REPLACE "/WX" "" EP_CXX_FLAGS_DEBUG "${EP_CXX_FLAGS_DEBUG}") + string(REPLACE "/WX" "" EP_C_FLAGS_DEBUG "${EP_C_FLAGS_DEBUG}") +else() + string(APPEND EP_CXX_FLAGS_DEBUG " -Wno-error") + string(APPEND EP_C_FLAGS_DEBUG " -Wno-error") +endif() + +# CC/CXX environment variables are captured on the first invocation of the +# builder (e.g. make or ninja) instead of when CMake is invoked in the build +# directory. This leads to issues if the variables are exported in a subshell +# and the invocation of make/ninja is in a distinct subshell without the same +# environment (CC/CXX). 
+set(EP_C_COMPILER "${CMAKE_C_COMPILER}") +if(NOT CMAKE_VERSION VERSION_LESS 3.19) + if(CMAKE_C_COMPILER_ARG1) + separate_arguments(EP_C_COMPILER_ARGS NATIVE_COMMAND "${CMAKE_C_COMPILER_ARG1}") + list(APPEND EP_C_COMPILER ${EP_C_COMPILER_ARGS}) + endif() + string(REPLACE ";" ${EP_LIST_SEPARATOR} EP_C_COMPILER "${EP_C_COMPILER}") +endif() +set(EP_CXX_COMPILER "${CMAKE_CXX_COMPILER}") +if(NOT CMAKE_VERSION VERSION_LESS 3.19) + if(CMAKE_CXX_COMPILER_ARG1) + separate_arguments(EP_CXX_COMPILER_ARGS NATIVE_COMMAND "${CMAKE_CXX_COMPILER_ARG1}") + list(APPEND EP_CXX_COMPILER ${EP_CXX_COMPILER_ARGS}) + endif() + string(REPLACE ";" ${EP_LIST_SEPARATOR} EP_CXX_COMPILER "${EP_CXX_COMPILER}") +endif() +set(EP_COMMON_TOOLCHAIN "-DCMAKE_C_COMPILER=${EP_C_COMPILER}" + "-DCMAKE_CXX_COMPILER=${EP_CXX_COMPILER}") + +if(CMAKE_AR) + # Ensure using absolute path. + find_program(EP_CMAKE_AR ${CMAKE_AR} REQUIRED) + list(APPEND EP_COMMON_TOOLCHAIN -DCMAKE_AR=${EP_CMAKE_AR}) +endif() + +# RANLIB isn't used for MSVC +if(NOT MSVC) + if(CMAKE_RANLIB) + # Ensure using absolute path. + find_program(EP_CMAKE_RANLIB ${CMAKE_RANLIB} REQUIRED) + list(APPEND EP_COMMON_TOOLCHAIN -DCMAKE_RANLIB=${EP_CMAKE_RANLIB}) + endif() +endif() + +# External projects are still able to override the following declarations. +# cmake command line will favor the last defined variable when a duplicate is +# encountered. This requires that `EP_COMMON_CMAKE_ARGS` is always the first +# argument. 
+set(EP_COMMON_CMAKE_ARGS + ${EP_COMMON_TOOLCHAIN} + -DBUILD_SHARED_LIBS=OFF + -DBUILD_STATIC_LIBS=ON + -DBUILD_TESTING=OFF + -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} + -DCMAKE_CXX_FLAGS=${EP_CXX_FLAGS} + -DCMAKE_CXX_FLAGS_DEBUG=${EP_CXX_FLAGS_DEBUG} + -DCMAKE_CXX_FLAGS_MINSIZEREL=${EP_CXX_FLAGS_MINSIZEREL} + -DCMAKE_CXX_FLAGS_RELEASE=${EP_CXX_FLAGS_RELEASE} + -DCMAKE_CXX_FLAGS_RELWITHDEBINFO=${EP_CXX_FLAGS_RELWITHDEBINFO} + -DCMAKE_CXX_STANDARD=${CMAKE_CXX_STANDARD} + -DCMAKE_C_FLAGS=${EP_C_FLAGS} + -DCMAKE_C_FLAGS_DEBUG=${EP_C_FLAGS_DEBUG} + -DCMAKE_C_FLAGS_MINSIZEREL=${EP_C_FLAGS_MINSIZEREL} + -DCMAKE_C_FLAGS_RELEASE=${EP_C_FLAGS_RELEASE} + -DCMAKE_C_FLAGS_RELWITHDEBINFO=${EP_C_FLAGS_RELWITHDEBINFO} + -DCMAKE_EXPORT_NO_PACKAGE_REGISTRY=${CMAKE_EXPORT_NO_PACKAGE_REGISTRY} + -DCMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY=${CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY} + -DCMAKE_INSTALL_LIBDIR=lib + -DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT} + -DCMAKE_VERBOSE_MAKEFILE=${CMAKE_VERBOSE_MAKEFILE} + # We set CMAKE_POLICY_VERSION_MINIMUM temporarily due to failures with CMake 4 + # We should remove it once we have updated the dependencies: + # https://github.com/apache/arrow/issues/45985 + -DCMAKE_POLICY_VERSION_MINIMUM=3.5) + +# if building with a toolchain file, pass that through +if(CMAKE_TOOLCHAIN_FILE) + list(APPEND EP_COMMON_CMAKE_ARGS -DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}) +endif() + +# and crosscompiling emulator (for try_run() ) +if(CMAKE_CROSSCOMPILING_EMULATOR) + string(REPLACE ";" ${EP_LIST_SEPARATOR} EP_CMAKE_CROSSCOMPILING_EMULATOR + "${CMAKE_CROSSCOMPILING_EMULATOR}") + list(APPEND EP_COMMON_CMAKE_ARGS + -DCMAKE_CROSSCOMPILING_EMULATOR=${EP_CMAKE_CROSSCOMPILING_EMULATOR}) +endif() + +if(CMAKE_PROJECT_INCLUDE) + list(APPEND EP_COMMON_CMAKE_ARGS -DCMAKE_PROJECT_INCLUDE=${CMAKE_PROJECT_INCLUDE}) +endif() + +# Enable s/ccache if set by parent. 
+if(CMAKE_C_COMPILER_LAUNCHER AND CMAKE_CXX_COMPILER_LAUNCHER) + file(TO_CMAKE_PATH "${CMAKE_C_COMPILER_LAUNCHER}" EP_CMAKE_C_COMPILER_LAUNCHER) + file(TO_CMAKE_PATH "${CMAKE_CXX_COMPILER_LAUNCHER}" EP_CMAKE_CXX_COMPILER_LAUNCHER) + list(APPEND EP_COMMON_CMAKE_ARGS + -DCMAKE_C_COMPILER_LAUNCHER=${EP_CMAKE_C_COMPILER_LAUNCHER} + -DCMAKE_CXX_COMPILER_LAUNCHER=${EP_CMAKE_CXX_COMPILER_LAUNCHER}) +endif() + +if(NOT ARROW_VERBOSE_THIRDPARTY_BUILD) + list(APPEND + EP_COMMON_OPTIONS + LOG_CONFIGURE + 1 + LOG_BUILD + 1 + LOG_INSTALL + 1 + LOG_DOWNLOAD + 1 + LOG_OUTPUT_ON_FAILURE + 1) + set(Boost_DEBUG FALSE) +else() + set(Boost_DEBUG TRUE) +endif() + +# Ensure that a default make is set +if("${MAKE}" STREQUAL "") + if(NOT MSVC) + find_program(MAKE make) + endif() +endif() + +# Args for external projects using make. +set(MAKE_BUILD_ARGS "-j${NPROC}") + +include(FetchContent) +set(FC_DECLARE_COMMON_OPTIONS SYSTEM) +if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.28) + list(APPEND FC_DECLARE_COMMON_OPTIONS EXCLUDE_FROM_ALL TRUE) +endif() + +macro(prepare_fetchcontent) + set(BUILD_SHARED_LIBS OFF) + set(BUILD_STATIC_LIBS ON) + set(BUILD_TESTING OFF) + set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "") + set(CMAKE_COMPILE_WARNING_AS_ERROR OFF) + set(CMAKE_EXPORT_NO_PACKAGE_REGISTRY ON) + set(CMAKE_EXPORT_PACKAGE_REGISTRY OFF) + set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "") + set(CMAKE_MACOSX_RPATH ${ARROW_INSTALL_NAME_RPATH}) + # We set CMAKE_POLICY_VERSION_MINIMUM temporarily due to failures with CMake 4 + # We should remove it once we have updated the dependencies: + # https://github.com/apache/arrow/issues/45985 + set(CMAKE_POLICY_VERSION_MINIMUM 3.5) + # Use "NEW" for CMP0077 by default. + # + # https://cmake.org/cmake/help/latest/policy/CMP0077.html + # + # option() honors normal variables. 
+ set(CMAKE_POLICY_DEFAULT_CMP0077 + NEW + CACHE STRING "") + set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "") + + if(MSVC) + string(REPLACE "/WX" "" CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG}") + string(REPLACE "/WX" "" CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG}") + else() + string(APPEND CMAKE_C_FLAGS_DEBUG " -Wno-error") + string(APPEND CMAKE_CXX_FLAGS_DEBUG " -Wno-error") + endif() +endmacro() + +# ---------------------------------------------------------------------- +# Find pthreads + +if(ARROW_ENABLE_THREADING) + set(THREADS_PREFER_PTHREAD_FLAG ON) + find_package(Threads REQUIRED) +endif() + +# ---------------------------------------------------------------------- +# Add Boost dependencies (code adapted from Apache Kudu) + +function(build_boost) + list(APPEND CMAKE_MESSAGE_INDENT "Boost: ") + message(STATUS "Building from source") + + fetchcontent_declare(boost + ${FC_DECLARE_COMMON_OPTIONS} OVERRIDE_FIND_PACKAGE + URL ${BOOST_SOURCE_URL} + URL_HASH "SHA256=${ARROW_BOOST_BUILD_SHA256_CHECKSUM}") + + prepare_fetchcontent() + set(BOOST_ENABLE_COMPATIBILITY_TARGETS ON) + set(BOOST_EXCLUDE_LIBRARIES) + set(BOOST_INCLUDE_LIBRARIES + ${ARROW_BOOST_COMPONENTS} + ${ARROW_BOOST_OPTIONAL_COMPONENTS} + algorithm + crc + numeric/conversion + scope_exit + throw_exception + tokenizer) + if(ARROW_TESTING + OR ARROW_GANDIVA + OR (NOT ARROW_USE_NATIVE_INT128)) + set(ARROW_BOOST_NEED_MULTIPRECISION TRUE) + else() + set(ARROW_BOOST_NEED_MULTIPRECISION FALSE) + endif() + if(ARROW_ENABLE_THREADING) + if(ARROW_WITH_THRIFT OR (ARROW_FLIGHT_SQL_ODBC AND MSVC)) + list(APPEND BOOST_INCLUDE_LIBRARIES locale) + endif() + if(ARROW_BOOST_NEED_MULTIPRECISION) + list(APPEND BOOST_INCLUDE_LIBRARIES multiprecision) + endif() + list(APPEND BOOST_INCLUDE_LIBRARIES thread) + else() + list(APPEND + BOOST_EXCLUDE_LIBRARIES + asio + container + date_time + lexical_cast + locale + lockfree + math + thread) + endif() + if(ARROW_WITH_THRIFT) + list(APPEND BOOST_INCLUDE_LIBRARIES uuid) + else() + 
    list(APPEND BOOST_EXCLUDE_LIBRARIES uuid)
  endif()
  set(BOOST_SKIP_INSTALL_RULES ON)
  if(NOT ARROW_ENABLE_THREADING)
    set(BOOST_UUID_LINK_LIBATOMIC OFF)
  endif()
  if(MSVC)
    string(APPEND CMAKE_C_FLAGS " /EHsc")
    string(APPEND CMAKE_CXX_FLAGS " /EHsc")
  else()
    # This is for https://github.com/boostorg/container/issues/305
    string(APPEND CMAKE_C_FLAGS " -Wno-strict-prototypes")
  endif()
  if(MSVC AND "${CMAKE_SYSTEM_PROCESSOR}" STREQUAL "ARM64")
    set(BOOST_CONTEXT_IMPLEMENTATION
        winfib
        CACHE STRING "" FORCE)
  endif()
  set(CMAKE_UNITY_BUILD OFF)

  fetchcontent_makeavailable(boost)

  set(boost_include_dirs)
  foreach(library ${BOOST_INCLUDE_LIBRARIES})
    # boost_numeric/conversion ->
    # boost_numeric_conversion
    string(REPLACE "/" "_" target_name "boost_${library}")
    target_link_libraries(${target_name} INTERFACE Boost::disable_autolinking)
    # NOTE(review): the generator expression below was garbled in extraction;
    # reconstructed as the include dirs of the per-library target computed above.
    list(APPEND boost_include_dirs
         $<TARGET_PROPERTY:${target_name},INTERFACE_INCLUDE_DIRECTORIES>)
  endforeach()
  target_link_libraries(boost_headers
                        INTERFACE Boost::algorithm
                                  Boost::crc
                                  Boost::numeric_conversion
                                  Boost::scope_exit
                                  Boost::throw_exception
                                  Boost::tokenizer)
  target_compile_definitions(boost_mpl INTERFACE "BOOST_MPL_CFG_NO_PREPROCESSED_HEADERS")

  if(ARROW_BOOST_NEED_MULTIPRECISION)
    if(ARROW_ENABLE_THREADING)
      target_link_libraries(boost_headers INTERFACE Boost::multiprecision)
    else()
      # We want to use Boost.multiprecision as standalone mode
      # without threading because non-standalone mode requires
      # threading. We can't use BOOST_MP_STANDALONE CMake variable for
      # this with Boost CMake build. So we create our CMake target for
      # it.
+ add_library(arrow::Boost::multiprecision INTERFACE IMPORTED) + target_include_directories(arrow::Boost::multiprecision + INTERFACE "${boost_SOURCE_DIR}/libs/multiprecision/include" + ) + target_compile_definitions(arrow::Boost::multiprecision + INTERFACE BOOST_MP_STANDALONE=1) + target_link_libraries(boost_headers INTERFACE arrow::Boost::multiprecision) + endif() + endif() + if(ARROW_WITH_THRIFT) + if(ARROW_ENABLE_THREADING) + add_library(arrow::Boost::locale ALIAS boost_locale) + else() + # Apache Parquet depends on Apache Thrift. + # Apache Thrift uses Boost.locale but it only uses header files. + # So we can use this for building Apache Thrift. + add_library(arrow::Boost::locale INTERFACE IMPORTED) + target_include_directories(arrow::Boost::locale + INTERFACE "${boost_SOURCE_DIR}/libs/locale/include") + endif() + endif() + + set(Boost_INCLUDE_DIRS + ${boost_include_dirs} + PARENT_SCOPE) + set(BOOST_VENDORED + TRUE + PARENT_SCOPE) + + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang" AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER + 15) + # GH-34094 Older versions of Boost use the deprecated std::unary_function in + # boost/container_hash/hash.hpp and support for that was removed in clang 16 + set(ARROW_BOOST_REQUIRED_VERSION "1.81") +else() + # CentOS 7 uses Boost 1.69. + set(ARROW_BOOST_REQUIRED_VERSION "1.69") +endif() + +set(Boost_USE_MULTITHREADED ON) +if(MSVC AND ARROW_USE_STATIC_CRT) + set(Boost_USE_STATIC_RUNTIME ON) +endif() +# CMake 3.25.0 has 1.80 and older versions. +# +# We can remove this once we require CMake 3.30.0 or later because we +# enable CMP0167 "The FindBoost module is removed." +# https://cmake.org/cmake/help/latest/policy/CMP0167.html with CMake +# 3.30.0 or later. 
+set(Boost_ADDITIONAL_VERSIONS + "1.89.0" + "1.89" + "1.88.0" + "1.88" + "1.87.0" + "1.87" + "1.86.0" + "1.86" + "1.85.0" + "1.85" + "1.84.0" + "1.84" + "1.83.0" + "1.83" + "1.82.0" + "1.82" + "1.81.0" + "1.81") + +# Compilers that don't support int128_t have a compile-time +# (header-only) dependency on Boost for int128_t. +if(ARROW_USE_UBSAN) + # NOTE: Avoid native int128_t on clang with UBSan as it produces linker errors + # (such as "undefined reference to '__muloti4'") + set(ARROW_USE_NATIVE_INT128 FALSE) +else() + include(CheckCXXSymbolExists) + check_cxx_symbol_exists("_M_ARM64" "" WIN32_ARM64_TARGET) + if(WIN32_ARM64_TARGET AND CMAKE_CXX_COMPILER_ID MATCHES "Clang") + # NOTE: For clang/win-arm64, native int128_t produce linker errors + set(ARROW_USE_NATIVE_INT128 FALSE) + else() + check_cxx_symbol_exists("__SIZEOF_INT128__" "" ARROW_USE_NATIVE_INT128) + endif() +endif() + +# - Gandiva has a compile-time (header-only) dependency on Boost, not runtime. +# - Tests need Boost at runtime. +# - S3FS and Flight benchmarks need Boost at runtime. +# - arrow_testing uses boost::filesystem. So arrow_testing requires +# Boost library. (boost::filesystem isn't header-only.) But if we +# use arrow_testing as a static library without +# using arrow::util::Process, we don't need boost::filesystem. +if(ARROW_BUILD_INTEGRATION + OR ARROW_BUILD_TESTS + OR (ARROW_FLIGHT AND (ARROW_TESTING OR ARROW_BUILD_BENCHMARKS)) + OR ARROW_FLIGHT_SQL_ODBC + OR (ARROW_S3 AND ARROW_BUILD_BENCHMARKS) + OR (ARROW_TESTING AND ARROW_BUILD_SHARED)) + set(ARROW_USE_BOOST TRUE) + set(ARROW_BOOST_REQUIRE_LIBRARY TRUE) +elseif(ARROW_GANDIVA + OR ARROW_TESTING + OR ARROW_WITH_THRIFT + OR (NOT ARROW_USE_NATIVE_INT128)) + set(ARROW_USE_BOOST TRUE) + set(ARROW_BOOST_REQUIRE_LIBRARY FALSE) +else() + set(ARROW_USE_BOOST FALSE) +endif() + +if(ARROW_USE_BOOST) + if(ARROW_BOOST_USE_SHARED) + # Find shared Boost libraries. 
+ set(Boost_USE_STATIC_LIBS OFF) + set(BUILD_SHARED_LIBS_KEEP ${BUILD_SHARED_LIBS}) + set(BUILD_SHARED_LIBS ON) + else() + # Find static boost headers and libs + set(Boost_USE_STATIC_LIBS ON) + endif() + if(ARROW_BOOST_REQUIRE_LIBRARY) + set(ARROW_BOOST_COMPONENTS filesystem) + if(ARROW_FLIGHT_SQL_ODBC) + list(APPEND ARROW_BOOST_COMPONENTS locale) + endif() + if(ARROW_ENABLE_THREADING) + set(ARROW_BOOST_OPTIONAL_COMPONENTS process) + endif() + else() + set(ARROW_BOOST_COMPONENTS) + set(ARROW_BOOST_OPTIONAL_COMPONENTS) + endif() + resolve_dependency(Boost + REQUIRED_VERSION + ${ARROW_BOOST_REQUIRED_VERSION} + COMPONENTS + ${ARROW_BOOST_COMPONENTS} + OPTIONAL_COMPONENTS + ${ARROW_BOOST_OPTIONAL_COMPONENTS} + IS_RUNTIME_DEPENDENCY + # libarrow.so doesn't depend on libboost*. + FALSE) + if(ARROW_BOOST_USE_SHARED) + set(BUILD_SHARED_LIBS ${BUILD_SHARED_LIBS_KEEP}) + unset(BUILD_SHARED_LIBS_KEEP) + endif() + + if(NOT BOOST_VENDORED) + foreach(BOOST_COMPONENT ${ARROW_BOOST_COMPONENTS} ${ARROW_BOOST_OPTIONAL_COMPONENTS}) + set(BOOST_LIBRARY Boost::${BOOST_COMPONENT}) + if(NOT TARGET ${BOOST_LIBRARY}) + continue() + endif() + target_link_libraries(${BOOST_LIBRARY} INTERFACE Boost::disable_autolinking) + if(ARROW_BOOST_USE_SHARED) + target_link_libraries(${BOOST_LIBRARY} INTERFACE Boost::dynamic_linking) + endif() + endforeach() + endif() + + if(ARROW_ENABLE_THREADING) + set(BOOST_PROCESS_HAVE_V2 FALSE) + if(TARGET Boost::process) + # Boost >= 1.86 + add_library(arrow::Boost::process INTERFACE IMPORTED) + target_link_libraries(arrow::Boost::process INTERFACE Boost::process) + target_compile_definitions(arrow::Boost::process INTERFACE "BOOST_PROCESS_HAVE_V1") + target_compile_definitions(arrow::Boost::process INTERFACE "BOOST_PROCESS_HAVE_V2") + set(BOOST_PROCESS_HAVE_V2 TRUE) + else() + # Boost < 1.86 + add_library(arrow::Boost::process INTERFACE IMPORTED) + if(TARGET Boost::filesystem) + target_link_libraries(arrow::Boost::process INTERFACE Boost::filesystem) + endif() + 
if(TARGET Boost::headers) + target_link_libraries(arrow::Boost::process INTERFACE Boost::headers) + endif() + if(Boost_VERSION VERSION_GREATER_EQUAL 1.80) + target_compile_definitions(arrow::Boost::process + INTERFACE "BOOST_PROCESS_HAVE_V2") + set(BOOST_PROCESS_HAVE_V2 TRUE) + # Boost < 1.86 has a bug that + # boost::process::v2::process_environment::on_setup() isn't + # defined. We need to build Boost Process source to define it. + # + # See also: + # https://github.com/boostorg/process/issues/312 + target_compile_definitions(arrow::Boost::process + INTERFACE "BOOST_PROCESS_NEED_SOURCE") + if(WIN32) + target_link_libraries(arrow::Boost::process INTERFACE bcrypt ntdll) + endif() + endif() + endif() + if(BOOST_PROCESS_HAVE_V2 + AND # We can't use v2 API on Windows because v2 API doesn't support + # process group[1] and GCS testbench uses multiple processes[2]. + # + # [1] https://github.com/boostorg/process/issues/259 + # [2] https://github.com/googleapis/storage-testbench/issues/669 + (NOT WIN32) + AND # We can't use v2 API with musl libc with Boost Process < 1.86 + # because Boost Process < 1.86 doesn't support musl libc[3]. 
+ # + # [3] https://github.com/boostorg/process/commit/aea22dbf6be1695ceb42367590b6ca34d9433500 + (NOT (ARROW_WITH_MUSL AND (Boost_VERSION VERSION_LESS 1.86)))) + target_compile_definitions(arrow::Boost::process INTERFACE "BOOST_PROCESS_USE_V2") + endif() + endif() + + message(STATUS "Boost include dir: ${Boost_INCLUDE_DIRS}") +endif() + +# ---------------------------------------------------------------------- +# cURL + +macro(find_curl) + if(NOT TARGET CURL::libcurl) + find_package(CURL REQUIRED) + list(APPEND ARROW_SYSTEM_DEPENDENCIES CURL) + endif() +endmacro() + +# ---------------------------------------------------------------------- +# Snappy + +macro(build_snappy) + message(STATUS "Building snappy from source") + set(SNAPPY_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/snappy_ep/src/snappy_ep-install") + set(SNAPPY_STATIC_LIB_NAME snappy) + set(SNAPPY_STATIC_LIB + "${SNAPPY_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}${SNAPPY_STATIC_LIB_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + + set(SNAPPY_CMAKE_ARGS + ${EP_COMMON_CMAKE_ARGS} -DSNAPPY_BUILD_TESTS=OFF -DSNAPPY_BUILD_BENCHMARKS=OFF + "-DCMAKE_INSTALL_PREFIX=${SNAPPY_PREFIX}") + # We can remove this once we remove -DCMAKE_POLICY_VERSION_MINIMUM=3.5 + # from EP_COMMON_CMAKE_ARGS. + list(REMOVE_ITEM SNAPPY_CMAKE_ARGS -DCMAKE_POLICY_VERSION_MINIMUM=3.5) + # Snappy unconditionally enables -Werror when building with clang this can lead + # to build failures by way of new compiler warnings. This adds a flag to disable + # -Werror to the very end of the invocation to override the snappy internal setting. + set(SNAPPY_ADDITIONAL_CXX_FLAGS "") + if(CMAKE_CXX_COMPILER_ID MATCHES "Clang") + string(APPEND SNAPPY_ADDITIONAL_CXX_FLAGS " -Wno-error") + endif() + # Snappy unconditionally disables RTTI, which is incompatible with some other + # build settings (https://github.com/apache/arrow/issues/43688). 
+ if(NOT MSVC) + string(APPEND SNAPPY_ADDITIONAL_CXX_FLAGS " -frtti") + endif() + + foreach(CONFIG DEBUG MINSIZEREL RELEASE RELWITHDEBINFO) + list(APPEND + SNAPPY_CMAKE_ARGS + "-DCMAKE_CXX_FLAGS_${CONFIG}=${EP_CXX_FLAGS_${CONFIG}} ${SNAPPY_ADDITIONAL_CXX_FLAGS}" + ) + endforeach() + + if(CMAKE_SYSTEM_NAME STREQUAL "Emscripten") + # ignore linker flag errors, as Snappy sets + # -Werror -Wall, and Emscripten doesn't support -soname + list(APPEND SNAPPY_CMAKE_ARGS + "-DCMAKE_SHARED_LINKER_FLAGS=${CMAKE_SHARED_LINKER_FLAGS}" + "-Wno-error=linkflags") + endif() + + externalproject_add(snappy_ep + ${EP_COMMON_OPTIONS} + BUILD_IN_SOURCE 1 + INSTALL_DIR ${SNAPPY_PREFIX} + URL ${SNAPPY_SOURCE_URL} + URL_HASH "SHA256=${ARROW_SNAPPY_BUILD_SHA256_CHECKSUM}" + CMAKE_ARGS ${SNAPPY_CMAKE_ARGS} + BUILD_BYPRODUCTS "${SNAPPY_STATIC_LIB}") + + file(MAKE_DIRECTORY "${SNAPPY_PREFIX}/include") + + set(Snappy_TARGET Snappy::snappy-static) + add_library(${Snappy_TARGET} STATIC IMPORTED) + set_target_properties(${Snappy_TARGET} PROPERTIES IMPORTED_LOCATION + "${SNAPPY_STATIC_LIB}") + target_include_directories(${Snappy_TARGET} BEFORE INTERFACE "${SNAPPY_PREFIX}/include") + add_dependencies(${Snappy_TARGET} snappy_ep) + + list(APPEND ARROW_BUNDLED_STATIC_LIBS ${Snappy_TARGET}) +endmacro() + +if(ARROW_WITH_SNAPPY) + resolve_dependency(Snappy + HAVE_ALT + TRUE + PC_PACKAGE_NAMES + snappy) + if(${Snappy_SOURCE} STREQUAL "SYSTEM" + AND NOT snappy_PC_FOUND + AND ARROW_BUILD_STATIC) + get_target_property(SNAPPY_TYPE ${Snappy_TARGET} TYPE) + if(NOT SNAPPY_TYPE STREQUAL "INTERFACE_LIBRARY") + string(APPEND ARROW_PC_LIBS_PRIVATE " $") + endif() + endif() +endif() + +# ---------------------------------------------------------------------- +# Brotli + +macro(build_brotli) + message(STATUS "Building brotli from source") + set(BROTLI_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/brotli_ep/src/brotli_ep-install") + set(BROTLI_INCLUDE_DIR "${BROTLI_PREFIX}/include") + set(BROTLI_LIB_DIR "${BROTLI_PREFIX}/lib") + 
set(BROTLI_STATIC_LIBRARY_ENC + "${BROTLI_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}brotlienc-static${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + set(BROTLI_STATIC_LIBRARY_DEC + "${BROTLI_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}brotlidec-static${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + set(BROTLI_STATIC_LIBRARY_COMMON + "${BROTLI_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}brotlicommon-static${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + set(BROTLI_CMAKE_ARGS ${EP_COMMON_CMAKE_ARGS} "-DCMAKE_INSTALL_PREFIX=${BROTLI_PREFIX}") + + set(BROTLI_EP_OPTIONS) + if(CMAKE_SYSTEM_NAME STREQUAL "Emscripten") + # "cmake install" is disabled for Brotli on Emscripten, so the + # default INSTALL_COMMAND fails. We need to disable the default + # INSTALL_COMMAND. + list(APPEND + BROTLI_EP_OPTIONS + INSTALL_COMMAND + ${CMAKE_COMMAND} + -E + true) + + set(BROTLI_BUILD_DIR ${CMAKE_CURRENT_BINARY_DIR}/brotli_ep-prefix/src/brotli_ep-build) + set(BROTLI_BUILD_LIBS + "${BROTLI_BUILD_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}brotlienc-static${CMAKE_STATIC_LIBRARY_SUFFIX}" + "${BROTLI_BUILD_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}brotlidec-static${CMAKE_STATIC_LIBRARY_SUFFIX}" + "${BROTLI_BUILD_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}brotlicommon-static${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + endif() + + externalproject_add(brotli_ep + ${EP_COMMON_OPTIONS} + URL ${BROTLI_SOURCE_URL} + URL_HASH "SHA256=${ARROW_BROTLI_BUILD_SHA256_CHECKSUM}" + BUILD_BYPRODUCTS "${BROTLI_STATIC_LIBRARY_ENC}" + "${BROTLI_STATIC_LIBRARY_DEC}" + "${BROTLI_STATIC_LIBRARY_COMMON}" + ${BROTLI_BUILD_BYPRODUCTS} + CMAKE_ARGS ${BROTLI_CMAKE_ARGS} + STEP_TARGETS headers_copy ${BROTLI_EP_OPTIONS}) + + if(CMAKE_SYSTEM_NAME STREQUAL "Emscripten") + # Copy the libraries to our install directory manually. 
+ set(BROTLI_BUILD_INCLUDE_DIR + ${CMAKE_CURRENT_BINARY_DIR}/brotli_ep-prefix/src/brotli_ep/c/include/brotli) + add_custom_command(TARGET brotli_ep + POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different + ${BROTLI_BUILD_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}*${CMAKE_STATIC_LIBRARY_SUFFIX} + ${BROTLI_LIB_DIR} + COMMAND ${CMAKE_COMMAND} -E copy_directory + ${BROTLI_BUILD_INCLUDE_DIR} ${BROTLI_INCLUDE_DIR}/brotli) + endif() + + file(MAKE_DIRECTORY "${BROTLI_INCLUDE_DIR}") + + add_library(Brotli::brotlicommon STATIC IMPORTED) + set_target_properties(Brotli::brotlicommon PROPERTIES IMPORTED_LOCATION + "${BROTLI_STATIC_LIBRARY_COMMON}") + target_include_directories(Brotli::brotlicommon BEFORE + INTERFACE "${BROTLI_INCLUDE_DIR}") + add_dependencies(Brotli::brotlicommon brotli_ep) + + add_library(Brotli::brotlienc STATIC IMPORTED) + set_target_properties(Brotli::brotlienc PROPERTIES IMPORTED_LOCATION + "${BROTLI_STATIC_LIBRARY_ENC}") + target_include_directories(Brotli::brotlienc BEFORE INTERFACE "${BROTLI_INCLUDE_DIR}") + add_dependencies(Brotli::brotlienc brotli_ep) + + add_library(Brotli::brotlidec STATIC IMPORTED) + set_target_properties(Brotli::brotlidec PROPERTIES IMPORTED_LOCATION + "${BROTLI_STATIC_LIBRARY_DEC}") + target_include_directories(Brotli::brotlidec BEFORE INTERFACE "${BROTLI_INCLUDE_DIR}") + add_dependencies(Brotli::brotlidec brotli_ep) + + list(APPEND + ARROW_BUNDLED_STATIC_LIBS + Brotli::brotlicommon + Brotli::brotlienc + Brotli::brotlidec) +endmacro() + +if(ARROW_WITH_BROTLI) + resolve_dependency(Brotli + HAVE_ALT + TRUE + PC_PACKAGE_NAMES + libbrotlidec + libbrotlienc) + # Order is important for static linking + set(ARROW_BROTLI_LIBS Brotli::brotlienc Brotli::brotlidec Brotli::brotlicommon) +endif() + +if(PARQUET_REQUIRE_ENCRYPTION AND NOT ARROW_PARQUET) + set(PARQUET_REQUIRE_ENCRYPTION OFF) +endif() +set(ARROW_OPENSSL_REQUIRED_VERSION "1.0.2") +set(ARROW_USE_OPENSSL OFF) +if(PARQUET_REQUIRE_ENCRYPTION + OR ARROW_AZURE + OR ARROW_FLIGHT + OR 
ARROW_GANDIVA + OR ARROW_GCS + OR ARROW_S3) + set(OpenSSL_SOURCE "SYSTEM") + resolve_dependency(OpenSSL + HAVE_ALT + TRUE + REQUIRED_VERSION + ${ARROW_OPENSSL_REQUIRED_VERSION}) + set(ARROW_USE_OPENSSL ON) + set(ARROW_OPENSSL_LIBS OpenSSL::Crypto OpenSSL::SSL) +endif() + +if(ARROW_USE_OPENSSL) + message(STATUS "Found OpenSSL Crypto Library: ${OPENSSL_CRYPTO_LIBRARY}") + message(STATUS "Building with OpenSSL (Version: ${OPENSSL_VERSION}) support") +else() + message(STATUS "Building without OpenSSL support. Minimum OpenSSL version ${ARROW_OPENSSL_REQUIRED_VERSION} required." + ) +endif() + +# ---------------------------------------------------------------------- +# GLOG + +macro(build_glog) + message(STATUS "Building glog from source") + set(GLOG_BUILD_DIR "${CMAKE_CURRENT_BINARY_DIR}/glog_ep-prefix/src/glog_ep") + set(GLOG_INCLUDE_DIR "${GLOG_BUILD_DIR}/include") + if(${UPPERCASE_BUILD_TYPE} STREQUAL "DEBUG") + set(GLOG_LIB_SUFFIX "d") + else() + set(GLOG_LIB_SUFFIX "") + endif() + set(GLOG_STATIC_LIB + "${GLOG_BUILD_DIR}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}glog${GLOG_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + set(GLOG_CMAKE_CXX_FLAGS "${EP_CXX_FLAGS}") + set(GLOG_CMAKE_C_FLAGS "${EP_C_FLAGS}") + if(CMAKE_THREAD_LIBS_INIT) + string(APPEND GLOG_CMAKE_CXX_FLAGS " ${CMAKE_THREAD_LIBS_INIT}") + string(APPEND GLOG_CMAKE_C_FLAGS " ${CMAKE_THREAD_LIBS_INIT}") + endif() + + if(APPLE) + # If we don't set this flag, the binary built with 10.13 cannot be used in 10.12. 
+ string(APPEND GLOG_CMAKE_CXX_FLAGS " -mmacosx-version-min=10.9") + endif() + + set(GLOG_CMAKE_ARGS + ${EP_COMMON_CMAKE_ARGS} + "-DCMAKE_INSTALL_PREFIX=${GLOG_BUILD_DIR}" + -DWITH_GFLAGS=OFF + -DCMAKE_CXX_FLAGS=${GLOG_CMAKE_CXX_FLAGS} + -DCMAKE_C_FLAGS=${GLOG_CMAKE_C_FLAGS}) + externalproject_add(glog_ep + ${EP_COMMON_OPTIONS} + URL ${GLOG_SOURCE_URL} + URL_HASH "SHA256=${ARROW_GLOG_BUILD_SHA256_CHECKSUM}" + BUILD_IN_SOURCE 1 + BUILD_BYPRODUCTS "${GLOG_STATIC_LIB}" + CMAKE_ARGS ${GLOG_CMAKE_ARGS}) + + file(MAKE_DIRECTORY "${GLOG_INCLUDE_DIR}") + + add_library(glog::glog STATIC IMPORTED) + set_target_properties(glog::glog PROPERTIES IMPORTED_LOCATION "${GLOG_STATIC_LIB}") + target_include_directories(glog::glog BEFORE INTERFACE "${GLOG_INCLUDE_DIR}") + add_dependencies(glog::glog glog_ep) + + list(APPEND ARROW_BUNDLED_STATIC_LIBS glog::glog) +endmacro() + +if(ARROW_USE_GLOG) + resolve_dependency(glog + HAVE_ALT + TRUE + PC_PACKAGE_NAMES + libglog) +endif() + +# ---------------------------------------------------------------------- +# gflags + +if(ARROW_BUILD_TESTS + OR ARROW_BUILD_BENCHMARKS + OR ARROW_BUILD_INTEGRATION + OR ARROW_USE_GLOG) + set(ARROW_NEED_GFLAGS TRUE) +else() + set(ARROW_NEED_GFLAGS FALSE) +endif() + +macro(build_gflags) + message(STATUS "Building gflags from source") + + set(GFLAGS_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/gflags_ep-prefix/src/gflags_ep") + set(GFLAGS_INCLUDE_DIR "${GFLAGS_PREFIX}/include") + if(${UPPERCASE_BUILD_TYPE} STREQUAL "DEBUG") + set(GFLAGS_LIB_SUFFIX "_debug") + else() + set(GFLAGS_LIB_SUFFIX "") + endif() + if(MSVC) + set(GFLAGS_STATIC_LIB "${GFLAGS_PREFIX}/lib/gflags_static${GFLAGS_LIB_SUFFIX}.lib") + else() + set(GFLAGS_STATIC_LIB "${GFLAGS_PREFIX}/lib/libgflags${GFLAGS_LIB_SUFFIX}.a") + endif() + set(GFLAGS_CMAKE_ARGS + ${EP_COMMON_CMAKE_ARGS} + "-DCMAKE_INSTALL_PREFIX=${GFLAGS_PREFIX}" + -DBUILD_PACKAGING=OFF + -DBUILD_CONFIG_TESTS=OFF + -DINSTALL_HEADERS=ON) + + file(MAKE_DIRECTORY "${GFLAGS_INCLUDE_DIR}") + 
externalproject_add(gflags_ep + ${EP_COMMON_OPTIONS} + URL ${GFLAGS_SOURCE_URL} + URL_HASH "SHA256=${ARROW_GFLAGS_BUILD_SHA256_CHECKSUM}" + BUILD_IN_SOURCE 1 + BUILD_BYPRODUCTS "${GFLAGS_STATIC_LIB}" + CMAKE_ARGS ${GFLAGS_CMAKE_ARGS}) + + add_thirdparty_lib(gflags::gflags_static STATIC ${GFLAGS_STATIC_LIB}) + add_dependencies(gflags::gflags_static gflags_ep) + set(GFLAGS_LIBRARY gflags::gflags_static) + set_target_properties(${GFLAGS_LIBRARY} PROPERTIES INTERFACE_COMPILE_DEFINITIONS + "GFLAGS_IS_A_DLL=0") + target_include_directories(${GFLAGS_LIBRARY} BEFORE INTERFACE "${GFLAGS_INCLUDE_DIR}") + if(MSVC) + set_target_properties(${GFLAGS_LIBRARY} PROPERTIES INTERFACE_LINK_LIBRARIES + "shlwapi.lib") + endif() + set(GFLAGS_LIBRARIES ${GFLAGS_LIBRARY}) + + set(GFLAGS_VENDORED TRUE) + + list(APPEND ARROW_BUNDLED_STATIC_LIBS gflags::gflags_static) +endmacro() + +if(ARROW_NEED_GFLAGS) + set(ARROW_GFLAGS_REQUIRED_VERSION "2.1.0") + resolve_dependency(gflags + HAVE_ALT + TRUE + REQUIRED_VERSION + ${ARROW_GFLAGS_REQUIRED_VERSION} + IS_RUNTIME_DEPENDENCY + FALSE) + + if(NOT TARGET ${GFLAGS_LIBRARIES}) + if(TARGET gflags::gflags_shared) + set(GFLAGS_LIBRARIES gflags::gflags_shared) + elseif(TARGET gflags-shared) + set(GFLAGS_LIBRARIES gflags-shared) + elseif(TARGET gflags_shared) + set(GFLAGS_LIBRARIES gflags_shared) + elseif(TARGET gflags::gflags) + set(GFLAGS_LIBRARIES gflags::gflags) + endif() + endif() +endif() + +# ---------------------------------------------------------------------- +# Thrift + +function(build_thrift) + list(APPEND CMAKE_MESSAGE_INDENT "Thrift: ") + message(STATUS "Building from source") + + if(CMAKE_VERSION VERSION_LESS 3.26) + message(FATAL_ERROR "Require CMake 3.26 or later for building bundled Apache Thrift") + endif() + set(THRIFT_PATCH_COMMAND) + if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") + find_program(PATCH patch) + if(PATCH) + list(APPEND + THRIFT_PATCH_COMMAND + ${PATCH} + -p1 + -i) + else() + find_program(GIT git) + if(GIT) + list(APPEND 
THRIFT_PATCH_COMMAND ${GIT} apply) + endif() + endif() + if(THRIFT_PATCH_COMMAND) + # https://github.com/apache/thrift/pull/3187 + list(APPEND THRIFT_PATCH_COMMAND ${CMAKE_CURRENT_LIST_DIR}/thrift-3187.patch) + endif() + endif() + fetchcontent_declare(thrift + ${FC_DECLARE_COMMON_OPTIONS} + PATCH_COMMAND ${THRIFT_PATCH_COMMAND} + URL ${THRIFT_SOURCE_URL} + URL_HASH "SHA256=${ARROW_THRIFT_BUILD_SHA256_CHECKSUM}") + + prepare_fetchcontent() + set(BUILD_COMPILER OFF) + set(BUILD_EXAMPLES OFF) + set(BUILD_TUTORIALS OFF) + set(CMAKE_UNITY_BUILD OFF) + set(WITH_AS3 OFF) + set(WITH_CPP ON) + set(WITH_C_GLIB OFF) + set(WITH_JAVA OFF) + set(WITH_JAVASCRIPT OFF) + set(WITH_LIBEVENT OFF) + if(MSVC) + if(ARROW_USE_STATIC_CRT) + set(WITH_MT ON) + else() + set(WITH_MT OFF) + endif() + endif() + set(WITH_NODEJS OFF) + set(WITH_OPENSSL OFF) + set(WITH_PYTHON OFF) + set(WITH_QT5 OFF) + set(WITH_ZLIB OFF) + + # Apache Thrift may change CMAKE_DEBUG_POSTFIX. So we'll restore the + # original CMAKE_DEBUG_POSTFIX later. + set(CMAKE_DEBUG_POSTFIX_KEEP ${CMAKE_DEBUG_POSTFIX}) + + # Remove Apache Arrow's CMAKE_MODULE_PATH to ensure using Apache + # Thrift's cmake_modules/. + # + # We can remove this once https://github.com/apache/thrift/pull/3176 + # is merged. + list(POP_FRONT CMAKE_MODULE_PATH) + fetchcontent_makeavailable(thrift) + + # Apache Thrift may change CMAKE_DEBUG_POSTFIX. So we restore + # CMAKE_DEBUG_POSTFIX. 
+ set(CMAKE_DEBUG_POSTFIX + ${CMAKE_DEBUG_POSTFIX_KEEP} + CACHE BOOL "" FORCE) + + if(CMAKE_VERSION VERSION_LESS 3.28) + set_property(DIRECTORY ${thrift_SOURCE_DIR} PROPERTY EXCLUDE_FROM_ALL TRUE) + endif() + + target_include_directories(thrift + INTERFACE $ + $ + ) + if(BOOST_VENDORED) + target_link_libraries(thrift PUBLIC $) + target_link_libraries(thrift PRIVATE $) + else() + target_link_libraries(thrift INTERFACE Boost::headers) + endif() + + add_library(thrift::thrift INTERFACE IMPORTED) + target_link_libraries(thrift::thrift INTERFACE thrift) + + set(Thrift_VERSION + ${ARROW_THRIFT_BUILD_VERSION} + PARENT_SCOPE) + set(THRIFT_VENDORED + TRUE + PARENT_SCOPE) + set(ARROW_BUNDLED_STATIC_LIBS + ${ARROW_BUNDLED_STATIC_LIBS} thrift + PARENT_SCOPE) + + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +if(ARROW_WITH_THRIFT) + # Thrift C++ code generated by 0.13 requires 0.11 or greater + resolve_dependency(Thrift + ARROW_CMAKE_PACKAGE_NAME + Parquet + ARROW_PC_PACKAGE_NAME + parquet + HAVE_ALT + TRUE + PC_PACKAGE_NAMES + thrift + REQUIRED_VERSION + 0.11.0) + + string(REPLACE "." 
";" Thrift_VERSION_LIST ${Thrift_VERSION}) + list(GET Thrift_VERSION_LIST 0 Thrift_VERSION_MAJOR) + list(GET Thrift_VERSION_LIST 1 Thrift_VERSION_MINOR) + list(GET Thrift_VERSION_LIST 2 Thrift_VERSION_PATCH) +endif() + +# ---------------------------------------------------------------------- +# Protocol Buffers (required for ORC, Flight and Substrait libraries) + +function(build_protobuf) + list(APPEND CMAKE_MESSAGE_INDENT "Protobuf: ") + message(STATUS "Building Protocol Buffers from source using FetchContent") + set(PROTOBUF_VENDORED + TRUE + PARENT_SCOPE) + set(PROTOBUF_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/protobuf_fc-install") + set(PROTOBUF_PREFIX + "${PROTOBUF_PREFIX}" + PARENT_SCOPE) + set(PROTOBUF_INCLUDE_DIR "${PROTOBUF_PREFIX}/include") + set(PROTOBUF_INCLUDE_DIR + "${PROTOBUF_INCLUDE_DIR}" + PARENT_SCOPE) + + fetchcontent_declare(protobuf + ${FC_DECLARE_COMMON_OPTIONS} OVERRIDE_FIND_PACKAGE + URL ${PROTOBUF_SOURCE_URL} + URL_HASH "SHA256=${ARROW_PROTOBUF_BUILD_SHA256_CHECKSUM}" + SOURCE_SUBDIR cmake) + + prepare_fetchcontent() + + # This flag is based on what the user initially requested but if + # we've fallen back to building protobuf we always build it statically + # so we need to reset the flag so that we can link against it correctly + # later. 
+ set(Protobuf_USE_STATIC_LIBS ON) + + # Strip lto flags (which may be added by dh_auto_configure) + # See https://github.com/protocolbuffers/protobuf/issues/7092 + string(REPLACE "-flto=auto" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") + string(REPLACE "-ffat-lto-objects" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") + string(REPLACE "-flto=auto" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") + string(REPLACE "-ffat-lto-objects" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") + + set(protobuf_BUILD_TESTS OFF) + if(MSVC AND NOT ARROW_USE_STATIC_CRT) + set(protobuf_MSVC_STATIC_RUNTIME OFF) + endif() + + # Unity build causes some build errors + set(CMAKE_UNITY_BUILD OFF) + + fetchcontent_makeavailable(protobuf) + + # Get the actual include directory from the Protobuf target. + # For FetchContent, this points to the source directory which contains the .proto files. + set(PROTOBUF_INCLUDE_DIR "${protobuf_SOURCE_DIR}/src") + # For compatibility of CMake's FindProtobuf.cmake. + set(Protobuf_INCLUDE_DIRS "${PROTOBUF_INCLUDE_DIR}") + set(Protobuf_INCLUDE_DIRS + "${Protobuf_INCLUDE_DIRS}" + PARENT_SCOPE) + # Set import dirs so protoc can find well-known types like timestamp.proto + set(Protobuf_IMPORT_DIRS "${PROTOBUF_INCLUDE_DIR}") + set(Protobuf_IMPORT_DIRS + "${Protobuf_IMPORT_DIRS}" + PARENT_SCOPE) + + # For FetchContent builds, protoc and libprotoc are regular targets, not imported. + # We can't get their locations at configure time, so we set placeholders. + # The actual locations will be resolved at build time or by the install step. + set(PROTOBUF_COMPILER "$") + set(PROTOC_STATIC_LIB "$") + set(Protobuf_PROTOC_LIBRARY "${PROTOC_STATIC_LIB}") + + # gRPC requires Protobuf to be installed to a known location. + # We have to do this in two steps to avoid double installation of Protobuf + # when Arrow is installed. 
  # This custom target ensures Protobuf is built before we install
  add_custom_target(protobuf_built
                    DEPENDS protobuf::libprotobuf protobuf::libprotobuf-lite
                            protobuf::libprotoc protobuf::protoc)

  # Disable Protobuf's install script after it's built to prevent double installation
  add_custom_command(OUTPUT "${protobuf_BINARY_DIR}/cmake_install.cmake.saved"
                     COMMAND ${CMAKE_COMMAND} -E copy_if_different
                             "${protobuf_BINARY_DIR}/cmake_install.cmake"
                             "${protobuf_BINARY_DIR}/cmake_install.cmake.saved"
                     COMMAND ${CMAKE_COMMAND} -E echo
                             "# Protobuf install disabled to prevent double installation with Arrow"
                             > "${protobuf_BINARY_DIR}/cmake_install.cmake"
                     DEPENDS protobuf_built
                     COMMENT "Disabling Protobuf install to prevent double installation"
                     VERBATIM)

  add_custom_target(protobuf_install_disabled ALL
                    DEPENDS "${protobuf_BINARY_DIR}/cmake_install.cmake.saved")

  # Install Protobuf to PROTOBUF_PREFIX for dependents to find
  add_custom_command(OUTPUT "${PROTOBUF_PREFIX}/.protobuf_installed"
                     COMMAND ${CMAKE_COMMAND} -E copy_if_different
                             "${protobuf_BINARY_DIR}/cmake_install.cmake.saved"
                             "${protobuf_BINARY_DIR}/cmake_install.cmake.tmp"
                     COMMAND ${CMAKE_COMMAND} -DCMAKE_INSTALL_PREFIX=${PROTOBUF_PREFIX}
                             -DCMAKE_INSTALL_CONFIG_NAME=$<CONFIG> -P
                             "${protobuf_BINARY_DIR}/cmake_install.cmake.tmp" ||
                             ${CMAKE_COMMAND} -E true
                     COMMAND ${CMAKE_COMMAND} -E touch
                             "${PROTOBUF_PREFIX}/.protobuf_installed"
                     DEPENDS protobuf_install_disabled
                     COMMENT "Installing Protobuf to ${PROTOBUF_PREFIX} for gRPC"
                     VERBATIM)

  # Make protobuf_fc depend on the install completion marker
  add_custom_target(protobuf_fc DEPENDS "${PROTOBUF_PREFIX}/.protobuf_installed")
  set(ARROW_BUNDLED_STATIC_LIBS
      ${ARROW_BUNDLED_STATIC_LIBS} protobuf::libprotobuf
      PARENT_SCOPE)

  if(CMAKE_CROSSCOMPILING)
    # If we are cross compiling, we need to build protoc for the host
    # system also, as it is used when building Arrow
    set(PROTOBUF_HOST_PREFIX
"${CMAKE_CURRENT_BINARY_DIR}/protobuf_ep_host-install") + set(PROTOBUF_HOST_COMPILER "${PROTOBUF_HOST_PREFIX}/bin/protoc") + + set(PROTOBUF_HOST_CMAKE_ARGS + "-DCMAKE_CXX_FLAGS=" + "-DCMAKE_C_FLAGS=" + "-DCMAKE_INSTALL_PREFIX=${PROTOBUF_HOST_PREFIX}" + -Dprotobuf_BUILD_TESTS=OFF + -Dprotobuf_DEBUG_POSTFIX=) + + # We reuse the FetchContent downloaded source but build it with host compiler + externalproject_add(protobuf_ep_host + ${EP_COMMON_OPTIONS} + CMAKE_ARGS ${PROTOBUF_HOST_CMAKE_ARGS} + SOURCE_DIR "${protobuf_SOURCE_DIR}" + SOURCE_SUBDIR cmake + BUILD_BYPRODUCTS "${PROTOBUF_HOST_COMPILER}" + DOWNLOAD_COMMAND "" DOWNLOAD_EXTRACT_TIMESTAMP TRUE) + + add_executable(arrow::protobuf::host_protoc IMPORTED) + set_target_properties(arrow::protobuf::host_protoc + PROPERTIES IMPORTED_LOCATION "${PROTOBUF_HOST_COMPILER}") + + add_dependencies(arrow::protobuf::host_protoc protobuf_ep_host) + endif() + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +if(ARROW_WITH_PROTOBUF) + if(ARROW_FLIGHT_SQL) + # Flight SQL uses proto3 optionals, which require 3.12 or later. + # 3.12.0-3.14.0: need --experimental_allow_proto3_optional + # 3.15.0-: don't need --experimental_allow_proto3_optional + set(ARROW_PROTOBUF_REQUIRED_VERSION "3.12.0") + elseif(ARROW_SUBSTRAIT) + # Substrait protobuf files use proto3 syntax + set(ARROW_PROTOBUF_REQUIRED_VERSION "3.0.0") + else() + set(ARROW_PROTOBUF_REQUIRED_VERSION "2.6.1") + endif() + if(ARROW_ORC + OR ARROW_SUBSTRAIT + OR ARROW_WITH_OPENTELEMETRY) + set(ARROW_PROTOBUF_ARROW_CMAKE_PACKAGE_NAME "Arrow") + set(ARROW_PROTOBUF_ARROW_PC_PACKAGE_NAME "arrow") + elseif(ARROW_FLIGHT) + set(ARROW_PROTOBUF_ARROW_CMAKE_PACKAGE_NAME "ArrowFlight") + set(ARROW_PROTOBUF_ARROW_PC_PACKAGE_NAME "arrow-flight") + else() + message(FATAL_ERROR "ARROW_WITH_PROTOBUF must be propagated in the build tooling installation." 
+ " Please extend the mappings of ARROW_PROTOBUF_ARROW_CMAKE_PACKAGE_NAME and" + " ARROW_PROTOBUF_ARROW_PC_PACKAGE_NAME for newly introduced dependencies on" + " protobuf.") + endif() + # We need to use FORCE_ANY_NEWER_VERSION here to accept Protobuf + # newer version such as 23.4. If we don't use it, 23.4 is processed + # as an incompatible version with 3.12.0 with protobuf-config.cmake + # provided by Protobuf. Because protobuf-config-version.cmake + # requires the same major version. In the example, "23" for 23.4 and + # "3" for 3.12.0 are different. So 23.4 is rejected with 3.12.0. If + # we use FORCE_ANY_NEWER_VERSION here, we can bypass the check and + # use 23.4. + resolve_dependency(Protobuf + ARROW_CMAKE_PACKAGE_NAME + ${ARROW_PROTOBUF_ARROW_CMAKE_PACKAGE_NAME} + ARROW_PC_PACKAGE_NAME + ${ARROW_PROTOBUF_ARROW_PC_PACKAGE_NAME} + FORCE_ANY_NEWER_VERSION + TRUE + HAVE_ALT + TRUE + PC_PACKAGE_NAMES + protobuf + REQUIRED_VERSION + ${ARROW_PROTOBUF_REQUIRED_VERSION}) + + # If PROTOBUF_VENDORED we build static protobuf from source via FetchContent + if(NOT PROTOBUF_VENDORED + AND NOT Protobuf_USE_STATIC_LIBS + AND MSVC_TOOLCHAIN) + add_definitions(-DPROTOBUF_USE_DLLS) + endif() + + if(TARGET arrow::protobuf::libprotobuf) + set(ARROW_PROTOBUF_LIBPROTOBUF arrow::protobuf::libprotobuf) + else() + set(ARROW_PROTOBUF_LIBPROTOBUF protobuf::libprotobuf) + endif() + if(TARGET arrow::protobuf::libprotoc) + set(ARROW_PROTOBUF_LIBPROTOC arrow::protobuf::libprotoc) + else() + set(ARROW_PROTOBUF_LIBPROTOC protobuf::libprotoc) + endif() + if(TARGET arrow::protobuf::host_protoc) + # make sure host protoc is used for compiling protobuf files + # during build of e.g. 
orc
+    set(ARROW_PROTOBUF_PROTOC arrow::protobuf::host_protoc)
+  elseif(TARGET arrow::protobuf::protoc)
+    set(ARROW_PROTOBUF_PROTOC arrow::protobuf::protoc)
+  else()
+    if(NOT TARGET protobuf::protoc)
+      add_executable(protobuf::protoc IMPORTED)
+      set_target_properties(protobuf::protoc PROPERTIES IMPORTED_LOCATION
+                                                        "${PROTOBUF_PROTOC_EXECUTABLE}")
+    endif()
+    set(ARROW_PROTOBUF_PROTOC protobuf::protoc)
+  endif()
+
+  # Log protobuf paths as we often see issues with mixed sources for
+  # the libraries and protoc.
+  get_target_property(PROTOBUF_PROTOC_EXECUTABLE ${ARROW_PROTOBUF_PROTOC}
+                      IMPORTED_LOCATION_RELEASE)
+  if(NOT PROTOBUF_PROTOC_EXECUTABLE)
+    # Fall back to the configuration-agnostic location when no
+    # Release-specific location is recorded on the imported target.
+    get_target_property(PROTOBUF_PROTOC_EXECUTABLE ${ARROW_PROTOBUF_PROTOC}
+                        IMPORTED_LOCATION)
+  endif()
+  message(STATUS "Found protoc: ${PROTOBUF_PROTOC_EXECUTABLE}")
+  get_target_property(PROTOBUF_TYPE ${ARROW_PROTOBUF_LIBPROTOBUF} TYPE)
+  # BUGFIX: the condition previously read "if(NOT STREQUAL ...)" — the left
+  # operand (PROTOBUF_TYPE, fetched just above) was missing, which makes the
+  # if() condition malformed. Interface libraries have no IMPORTED_LOCATION,
+  # so the detailed logging only applies to non-interface targets.
+  if(NOT PROTOBUF_TYPE STREQUAL "INTERFACE_LIBRARY")
+    # Protobuf_PROTOC_LIBRARY is set by all versions of FindProtobuf.cmake
+    message(STATUS "Found libprotoc: ${Protobuf_PROTOC_LIBRARY}")
+    get_target_property(PROTOBUF_LIBRARY ${ARROW_PROTOBUF_LIBPROTOBUF} IMPORTED_LOCATION)
+    message(STATUS "Found libprotobuf: ${PROTOBUF_LIBRARY}")
+    message(STATUS "Found protobuf headers: ${PROTOBUF_INCLUDE_DIR}")
+  endif()
+endif()
+
+# ----------------------------------------------------------------------
+# Substrait (required by compute engine)
+
+macro(build_substrait)
+  message(STATUS "Building Substrait from source")
+
+  # Note: not all protos in Substrait actually matter to plan
+  # consumption. No need to build the ones we don't need.
+ set(SUBSTRAIT_PROTOS + algebra + extended_expression + extensions/extensions + plan + type) + set(ARROW_SUBSTRAIT_PROTOS extension_rels) + set(ARROW_SUBSTRAIT_PROTOS_DIR "${CMAKE_SOURCE_DIR}/proto") + + externalproject_add(substrait_ep + ${EP_COMMON_OPTIONS} + CONFIGURE_COMMAND "" + BUILD_COMMAND "" + INSTALL_COMMAND "" + URL ${SUBSTRAIT_SOURCE_URL} + URL_HASH "SHA256=${ARROW_SUBSTRAIT_BUILD_SHA256_CHECKSUM}") + + externalproject_get_property(substrait_ep SOURCE_DIR) + set(SUBSTRAIT_LOCAL_DIR ${SOURCE_DIR}) + + set(SUBSTRAIT_CPP_DIR "${CMAKE_CURRENT_BINARY_DIR}/substrait_ep-generated") + file(MAKE_DIRECTORY ${SUBSTRAIT_CPP_DIR}) + + set(SUBSTRAIT_SUPPRESSED_FLAGS) + if(MSVC) + # Protobuf generated files trigger some spurious warnings on MSVC. + + # Implicit conversion from uint64_t to uint32_t: + list(APPEND SUBSTRAIT_SUPPRESSED_FLAGS "/wd4244") + + # Missing dll-interface: + list(APPEND SUBSTRAIT_SUPPRESSED_FLAGS "/wd4251") + else() + # GH-44954: silence [[deprecated]] declarations in protobuf-generated code + list(APPEND SUBSTRAIT_SUPPRESSED_FLAGS "-Wno-deprecated") + if(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" OR CMAKE_CXX_COMPILER_ID STREQUAL + "Clang") + # Protobuf generated files trigger some errors on CLANG TSAN builds + list(APPEND SUBSTRAIT_SUPPRESSED_FLAGS "-Wno-error=shorten-64-to-32") + endif() + endif() + + set(SUBSTRAIT_SOURCES) + set(SUBSTRAIT_PROTO_GEN_ALL) + foreach(SUBSTRAIT_PROTO ${SUBSTRAIT_PROTOS}) + set(SUBSTRAIT_PROTO_GEN "${SUBSTRAIT_CPP_DIR}/substrait/${SUBSTRAIT_PROTO}.pb") + + foreach(EXT h cc) + set_source_files_properties("${SUBSTRAIT_PROTO_GEN}.${EXT}" + PROPERTIES COMPILE_OPTIONS + "${SUBSTRAIT_SUPPRESSED_FLAGS}" + GENERATED TRUE + SKIP_UNITY_BUILD_INCLUSION TRUE) + list(APPEND SUBSTRAIT_PROTO_GEN_ALL "${SUBSTRAIT_PROTO_GEN}.${EXT}") + endforeach() + set(SUBSTRAIT_PROTOC_INCLUDES "-I${SUBSTRAIT_LOCAL_DIR}/proto") + if(PROTOBUF_VENDORED AND Protobuf_INCLUDE_DIRS) + list(APPEND SUBSTRAIT_PROTOC_INCLUDES 
"-I${Protobuf_INCLUDE_DIRS}") + endif() + add_custom_command(OUTPUT "${SUBSTRAIT_PROTO_GEN}.cc" "${SUBSTRAIT_PROTO_GEN}.h" + COMMAND ${ARROW_PROTOBUF_PROTOC} ${SUBSTRAIT_PROTOC_INCLUDES} + "--cpp_out=${SUBSTRAIT_CPP_DIR}" + "${SUBSTRAIT_LOCAL_DIR}/proto/substrait/${SUBSTRAIT_PROTO}.proto" + DEPENDS ${PROTO_DEPENDS} substrait_ep) + + list(APPEND SUBSTRAIT_SOURCES "${SUBSTRAIT_PROTO_GEN}.cc") + endforeach() + foreach(ARROW_SUBSTRAIT_PROTO ${ARROW_SUBSTRAIT_PROTOS}) + set(ARROW_SUBSTRAIT_PROTO_GEN + "${SUBSTRAIT_CPP_DIR}/substrait/${ARROW_SUBSTRAIT_PROTO}.pb") + foreach(EXT h cc) + set_source_files_properties("${ARROW_SUBSTRAIT_PROTO_GEN}.${EXT}" + PROPERTIES COMPILE_OPTIONS + "${SUBSTRAIT_SUPPRESSED_FLAGS}" + GENERATED TRUE + SKIP_UNITY_BUILD_INCLUSION TRUE) + list(APPEND SUBSTRAIT_PROTO_GEN_ALL "${ARROW_SUBSTRAIT_PROTO_GEN}.${EXT}") + endforeach() + set(ARROW_SUBSTRAIT_PROTOC_INCLUDES ${SUBSTRAIT_PROTOC_INCLUDES} + "-I${ARROW_SUBSTRAIT_PROTOS_DIR}") + add_custom_command(OUTPUT "${ARROW_SUBSTRAIT_PROTO_GEN}.cc" + "${ARROW_SUBSTRAIT_PROTO_GEN}.h" + COMMAND ${ARROW_PROTOBUF_PROTOC} ${ARROW_SUBSTRAIT_PROTOC_INCLUDES} + "--cpp_out=${SUBSTRAIT_CPP_DIR}" + "${ARROW_SUBSTRAIT_PROTOS_DIR}/substrait/${ARROW_SUBSTRAIT_PROTO}.proto" + DEPENDS ${PROTO_DEPENDS} substrait_ep) + + list(APPEND SUBSTRAIT_SOURCES "${ARROW_SUBSTRAIT_PROTO_GEN}.cc") + endforeach() + + add_custom_target(substrait_gen ALL DEPENDS ${SUBSTRAIT_PROTO_GEN_ALL}) + + set(SUBSTRAIT_INCLUDES ${SUBSTRAIT_CPP_DIR} ${PROTOBUF_INCLUDE_DIR}) + + add_library(substrait STATIC ${SUBSTRAIT_SOURCES}) + set_target_properties(substrait PROPERTIES POSITION_INDEPENDENT_CODE ON) + target_compile_options(substrait PRIVATE "${SUBSTRAIT_SUPPRESSED_FLAGS}") + target_include_directories(substrait PUBLIC ${SUBSTRAIT_INCLUDES}) + target_link_libraries(substrait PUBLIC ${ARROW_PROTOBUF_LIBPROTOBUF}) + add_dependencies(substrait substrait_gen) + + list(APPEND ARROW_BUNDLED_STATIC_LIBS substrait) +endmacro() + +if(ARROW_SUBSTRAIT) + # 
Currently, we can only build Substrait from source. + set(Substrait_SOURCE "BUNDLED") + resolve_dependency(Substrait) +endif() + +# ---------------------------------------------------------------------- +# jemalloc - Unix-only high-performance allocator + +macro(build_jemalloc) + # Our build of jemalloc is specially prefixed so that it will not + # conflict with the default allocator as well as other jemalloc + # installations. + + message(STATUS "Building jemalloc from source") + + set(ARROW_JEMALLOC_USE_SHARED OFF) + set(JEMALLOC_PREFIX + "${CMAKE_CURRENT_BINARY_DIR}/jemalloc_ep-prefix/src/jemalloc_ep/dist/") + set(JEMALLOC_LIB_DIR "${JEMALLOC_PREFIX}/lib") + set(JEMALLOC_STATIC_LIB + "${JEMALLOC_LIB_DIR}/libjemalloc_pic${CMAKE_STATIC_LIBRARY_SUFFIX}") + set(JEMALLOC_CONFIGURE_COMMAND ./configure "AR=${CMAKE_AR}" "CC=${CMAKE_C_COMPILER}") + if(CMAKE_OSX_SYSROOT) + list(APPEND JEMALLOC_CONFIGURE_COMMAND "SDKROOT=${CMAKE_OSX_SYSROOT}") + endif() + if(DEFINED ARROW_JEMALLOC_LG_PAGE) + # Used for arm64 manylinux wheels in order to make the wheel work on both + # 4k and 64k page arm64 systems. 
+ list(APPEND JEMALLOC_CONFIGURE_COMMAND "--with-lg-page=${ARROW_JEMALLOC_LG_PAGE}") + endif() + list(APPEND + JEMALLOC_CONFIGURE_COMMAND + "--prefix=${JEMALLOC_PREFIX}" + "--libdir=${JEMALLOC_LIB_DIR}" + "--with-jemalloc-prefix=je_arrow_" + "--with-private-namespace=je_arrow_private_" + "--without-export" + "--disable-shared" + # Don't override operator new() + "--disable-cxx" + "--disable-libdl" + # See https://github.com/jemalloc/jemalloc/issues/1237 + "--disable-initial-exec-tls" + ${EP_LOG_OPTIONS}) + if(${UPPERCASE_BUILD_TYPE} STREQUAL "DEBUG") + # Enable jemalloc debug checks when Arrow itself has debugging enabled + list(APPEND JEMALLOC_CONFIGURE_COMMAND "--enable-debug") + endif() + + set(JEMALLOC_BUILD_COMMAND ${MAKE} ${MAKE_BUILD_ARGS}) + + if(CMAKE_OSX_SYSROOT) + list(APPEND JEMALLOC_BUILD_COMMAND "SDKROOT=${CMAKE_OSX_SYSROOT}") + endif() + + externalproject_add(jemalloc_ep + ${EP_COMMON_OPTIONS} + URL ${JEMALLOC_SOURCE_URL} + URL_HASH "SHA256=${ARROW_JEMALLOC_BUILD_SHA256_CHECKSUM}" + PATCH_COMMAND touch doc/jemalloc.3 doc/jemalloc.html + # The prefix "je_arrow_" must be kept in sync with the value in memory_pool.cc + CONFIGURE_COMMAND ${JEMALLOC_CONFIGURE_COMMAND} + BUILD_IN_SOURCE 1 + BUILD_COMMAND ${JEMALLOC_BUILD_COMMAND} + BUILD_BYPRODUCTS "${JEMALLOC_STATIC_LIB}" + INSTALL_COMMAND ${MAKE} -j1 install) + + # Don't use the include directory directly so that we can point to a path + # that is unique to our codebase. + set(JEMALLOC_INCLUDE_DIR "${CMAKE_CURRENT_BINARY_DIR}/jemalloc_ep-prefix/src/") + # The include directory must exist before it is referenced by a target. 
+ file(MAKE_DIRECTORY "${JEMALLOC_INCLUDE_DIR}") + add_library(jemalloc::jemalloc STATIC IMPORTED) + set_target_properties(jemalloc::jemalloc PROPERTIES IMPORTED_LOCATION + "${JEMALLOC_STATIC_LIB}") + target_link_libraries(jemalloc::jemalloc INTERFACE Threads::Threads) + target_include_directories(jemalloc::jemalloc BEFORE + INTERFACE "${JEMALLOC_INCLUDE_DIR}") + add_dependencies(jemalloc::jemalloc jemalloc_ep) + + list(APPEND ARROW_BUNDLED_STATIC_LIBS jemalloc::jemalloc) + + set(jemalloc_VENDORED TRUE) + # For config.h.cmake + set(ARROW_JEMALLOC_VENDORED ${jemalloc_VENDORED}) +endmacro() + +if(ARROW_JEMALLOC) + if(NOT ARROW_ENABLE_THREADING) + message(FATAL_ERROR "Can't use jemalloc with ARROW_ENABLE_THREADING=OFF") + endif() + resolve_dependency(jemalloc HAVE_ALT TRUE) +endif() + +# ---------------------------------------------------------------------- +# mimalloc - Cross-platform high-performance allocator, from Microsoft + +if(ARROW_MIMALLOC) + if(NOT ARROW_ENABLE_THREADING) + message(FATAL_ERROR "Can't use mimalloc with ARROW_ENABLE_THREADING=OFF") + endif() + + message(STATUS "Building (vendored) mimalloc from source") + # We only use a vendored mimalloc as we want to control its build options. 
+ + set(MIMALLOC_LIB_BASE_NAME "mimalloc") + if(${UPPERCASE_BUILD_TYPE} STREQUAL "DEBUG") + set(MIMALLOC_LIB_BASE_NAME "${MIMALLOC_LIB_BASE_NAME}-${LOWERCASE_BUILD_TYPE}") + endif() + + set(MIMALLOC_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/mimalloc_ep/src/mimalloc_ep") + set(MIMALLOC_INCLUDE_DIR "${MIMALLOC_PREFIX}/include") + set(MIMALLOC_STATIC_LIB + "${MIMALLOC_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}${MIMALLOC_LIB_BASE_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + + set(MIMALLOC_C_FLAGS ${EP_C_FLAGS}) + if(MINGW) + # Workaround https://github.com/microsoft/mimalloc/issues/910 on RTools40 + set(MIMALLOC_C_FLAGS "${MIMALLOC_C_FLAGS} -DERROR_COMMITMENT_MINIMUM=635") + endif() + + set(MIMALLOC_CMAKE_ARGS + ${EP_COMMON_CMAKE_ARGS} + "-DCMAKE_C_FLAGS=${MIMALLOC_C_FLAGS}" + "-DCMAKE_INSTALL_PREFIX=${MIMALLOC_PREFIX}" + -DMI_INSTALL_TOPLEVEL=ON + -DMI_OVERRIDE=OFF + -DMI_LOCAL_DYNAMIC_TLS=ON + -DMI_BUILD_OBJECT=OFF + -DMI_BUILD_SHARED=OFF + -DMI_BUILD_TESTS=OFF + # GH-47229: Force mimalloc to generate armv8.0 binary + -DMI_NO_OPT_ARCH=ON) + + externalproject_add(mimalloc_ep + ${EP_COMMON_OPTIONS} + URL ${MIMALLOC_SOURCE_URL} + URL_HASH "SHA256=${ARROW_MIMALLOC_BUILD_SHA256_CHECKSUM}" + CMAKE_ARGS ${MIMALLOC_CMAKE_ARGS} + BUILD_BYPRODUCTS "${MIMALLOC_STATIC_LIB}") + + file(MAKE_DIRECTORY ${MIMALLOC_INCLUDE_DIR}) + + add_library(mimalloc::mimalloc STATIC IMPORTED) + set_target_properties(mimalloc::mimalloc PROPERTIES IMPORTED_LOCATION + "${MIMALLOC_STATIC_LIB}") + target_include_directories(mimalloc::mimalloc BEFORE + INTERFACE "${MIMALLOC_INCLUDE_DIR}") + target_link_libraries(mimalloc::mimalloc INTERFACE Threads::Threads) + if(WIN32) + target_link_libraries(mimalloc::mimalloc INTERFACE "bcrypt.lib" "psapi.lib") + endif() + add_dependencies(mimalloc::mimalloc mimalloc_ep) + + list(APPEND ARROW_BUNDLED_STATIC_LIBS mimalloc::mimalloc) + + set(mimalloc_VENDORED TRUE) +endif() + +# ---------------------------------------------------------------------- +# Google gtest + 
+function(build_gtest) + message(STATUS "Building gtest from source") + set(GTEST_VENDORED TRUE) + fetchcontent_declare(googletest + # We should not specify "EXCLUDE_FROM_ALL TRUE" here. + # Because we install GTest with custom path. + # ${FC_DECLARE_COMMON_OPTIONS} + URL ${GTEST_SOURCE_URL} + URL_HASH "SHA256=${ARROW_GTEST_BUILD_SHA256_CHECKSUM}") + prepare_fetchcontent() + # We can remove this once we remove set(CMAKE_POLICY_VERSION_MINIMUM + # 3.5) from prepare_fetchcontent(). + unset(CMAKE_POLICY_VERSION_MINIMUM) + if(APPLE) + string(APPEND CMAKE_CXX_FLAGS " -Wno-unused-value" " -Wno-ignored-attributes") + endif() + # If we're building static libs for Emscripten, we need to build *everything* as + # static libs. + if(CMAKE_SYSTEM_NAME STREQUAL "Emscripten") + set(BUILD_SHARED_LIBS OFF) + set(BUILD_STATIC_LIBS ON) + else() + set(BUILD_SHARED_LIBS ON) + set(BUILD_STATIC_LIBS OFF) + endif() + # We need to use "cache" variable to override the default + # INSTALL_GTEST option by this value. See also: + # https://cmake.org/cmake/help/latest/policy/CMP0077.html + set(INSTALL_GTEST + OFF + CACHE "BOOL" + "Enable installation of googletest. 
(Projects embedding googletest may want to turn this OFF.)"
+            FORCE)
+  string(APPEND CMAKE_INSTALL_INCLUDEDIR "/arrow-gtest")
+  fetchcontent_makeavailable(googletest)
+  foreach(target gmock gmock_main gtest gtest_main)
+    set_target_properties(${target}
+                          PROPERTIES OUTPUT_NAME "arrow_${target}"
+                                     PDB_NAME "arrow_${target}"
+                                     PDB_NAME_DEBUG "arrow_${target}d"
+                                     COMPILE_PDB_NAME "arrow_${target}"
+                                     COMPILE_PDB_NAME_DEBUG "arrow_${target}d"
+                                     RUNTIME_OUTPUT_DIRECTORY
+                                     "${BUILD_OUTPUT_ROOT_DIRECTORY}"
+                                     LIBRARY_OUTPUT_DIRECTORY
+                                     "${BUILD_OUTPUT_ROOT_DIRECTORY}"
+                                     ARCHIVE_OUTPUT_DIRECTORY
+                                     "${BUILD_OUTPUT_ROOT_DIRECTORY}"
+                                     PDB_OUTPUT_DIRECTORY
+                                     "${BUILD_OUTPUT_ROOT_DIRECTORY}")
+  endforeach()
+  install(DIRECTORY "${googletest_SOURCE_DIR}/googlemock/include/"
+                    "${googletest_SOURCE_DIR}/googletest/include/"
+          DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}")
+  add_library(arrow::GTest::gtest_headers INTERFACE IMPORTED)
+  target_include_directories(arrow::GTest::gtest_headers
+                             INTERFACE "${googletest_SOURCE_DIR}/googlemock/include/"
+                                       "${googletest_SOURCE_DIR}/googletest/include/")
+  install(TARGETS gmock gmock_main gtest gtest_main
+          EXPORT arrow_testing_targets
+          RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}"
+          ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
+          LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}")
+  if(MSVC)
+    # BUGFIX(reconstruction): these arguments were garbled to bare "$" by
+    # markup stripping; restore the TARGET_PDB_FILE generator expressions so
+    # PDB files ship next to the DLLs. OPTIONAL because PDBs only exist in
+    # configurations that produce them.
+    install(FILES $<TARGET_PDB_FILE:gmock> $<TARGET_PDB_FILE:gmock_main>
+                  $<TARGET_PDB_FILE:gtest> $<TARGET_PDB_FILE:gtest_main>
+            DESTINATION "${CMAKE_INSTALL_BINDIR}"
+            OPTIONAL)
+  endif()
+  add_library(arrow::GTest::gmock ALIAS gmock)
+  add_library(arrow::GTest::gmock_main ALIAS gmock_main)
+  add_library(arrow::GTest::gtest ALIAS gtest)
+  add_library(arrow::GTest::gtest_main ALIAS gtest_main)
+endfunction()
+
+if(ARROW_TESTING)
+  set(GTestAlt_NEED_CXX_STANDARD_CHECK TRUE)
+  resolve_dependency(GTest
+                     ARROW_CMAKE_PACKAGE_NAME
+                     ArrowTesting
+                     HAVE_ALT
+                     TRUE
+                     REQUIRED_VERSION
+                     1.10.0)
+
+  if(GTest_SOURCE STREQUAL "SYSTEM")
+    find_package(PkgConfig QUIET)
+    pkg_check_modules(gtest_PC
+                      gtest
+                      NO_CMAKE_PATH
+                      NO_CMAKE_ENVIRONMENT_PATH
+                      QUIET)
+    if(gtest_PC_FOUND)
+      string(APPEND ARROW_TESTING_PC_REQUIRES " gtest")
+    else()
+      # BUGFIX(reconstruction): the generator expressions here were garbled
+      # to bare "$" by markup stripping. Point arrow-testing.pc at the system
+      # GTest include directories and library file directly.
+      # TODO(review): confirm the exact expressions against the original patch.
+      string(APPEND ARROW_TESTING_PC_CFLAGS
+             " -I$<JOIN:$<TARGET_PROPERTY:GTest::gtest,INTERFACE_INCLUDE_DIRECTORIES>")
+      string(APPEND ARROW_TESTING_PC_CFLAGS ",-I>")
+      string(APPEND ARROW_TESTING_PC_LIBS " $<TARGET_FILE:GTest::gtest>")
+    endif()
+    set(ARROW_GTEST_GTEST_HEADERS)
+    set(ARROW_GTEST_GMOCK GTest::gmock)
+    set(ARROW_GTEST_GTEST GTest::gtest)
+    set(ARROW_GTEST_GTEST_MAIN GTest::gtest_main)
+    set(ARROW_GTEST_GMOCK_MAIN GTest::gmock_main)
+  else()
+    string(APPEND ARROW_TESTING_PC_CFLAGS " -I\${includedir}/arrow-gtest")
+    string(APPEND ARROW_TESTING_PC_LIBS " -larrow_gtest")
+    set(ARROW_GTEST_GTEST_HEADERS arrow::GTest::gtest_headers)
+    set(ARROW_GTEST_GMOCK arrow::GTest::gmock)
+    set(ARROW_GTEST_GTEST arrow::GTest::gtest)
+    set(ARROW_GTEST_GTEST_MAIN arrow::GTest::gtest_main)
+    set(ARROW_GTEST_GMOCK_MAIN arrow::GTest::gmock_main)
+  endif()
+endif()
+
+macro(build_benchmark)
+  message(STATUS "Building benchmark from source")
+
+  set(GBENCHMARK_CMAKE_CXX_FLAGS "${EP_CXX_FLAGS}")
+  if(APPLE AND (CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" OR CMAKE_CXX_COMPILER_ID
+                                                               STREQUAL "Clang"))
+    string(APPEND GBENCHMARK_CMAKE_CXX_FLAGS " -stdlib=libc++")
+  endif()
+
+  set(GBENCHMARK_PREFIX
+      "${CMAKE_CURRENT_BINARY_DIR}/gbenchmark_ep/src/gbenchmark_ep-install")
+  set(GBENCHMARK_INCLUDE_DIR "${GBENCHMARK_PREFIX}/include")
+  set(GBENCHMARK_STATIC_LIB
+      "${GBENCHMARK_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}benchmark${CMAKE_STATIC_LIBRARY_SUFFIX}"
+  )
+  set(GBENCHMARK_MAIN_STATIC_LIB
+      "${GBENCHMARK_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}benchmark_main${CMAKE_STATIC_LIBRARY_SUFFIX}"
+  )
+  set(GBENCHMARK_CMAKE_ARGS
+      ${EP_COMMON_CMAKE_ARGS} "-DCMAKE_INSTALL_PREFIX=${GBENCHMARK_PREFIX}"
+      -DBENCHMARK_ENABLE_TESTING=OFF -DCMAKE_CXX_FLAGS=${GBENCHMARK_CMAKE_CXX_FLAGS})
+  if(APPLE)
+    set(GBENCHMARK_CMAKE_ARGS ${GBENCHMARK_CMAKE_ARGS} "-DBENCHMARK_USE_LIBCXX=ON")
+  endif()
+
+  externalproject_add(gbenchmark_ep
+                      ${EP_COMMON_OPTIONS}
+                      URL ${GBENCHMARK_SOURCE_URL}
+                      URL_HASH
"SHA256=${ARROW_GBENCHMARK_BUILD_SHA256_CHECKSUM}" + BUILD_BYPRODUCTS "${GBENCHMARK_STATIC_LIB}" + "${GBENCHMARK_MAIN_STATIC_LIB}" + CMAKE_ARGS ${GBENCHMARK_CMAKE_ARGS}) + + # The include directory must exist before it is referenced by a target. + file(MAKE_DIRECTORY "${GBENCHMARK_INCLUDE_DIR}") + + add_library(benchmark::benchmark STATIC IMPORTED) + set_target_properties(benchmark::benchmark PROPERTIES IMPORTED_LOCATION + "${GBENCHMARK_STATIC_LIB}") + target_include_directories(benchmark::benchmark BEFORE + INTERFACE "${GBENCHMARK_INCLUDE_DIR}") + target_compile_definitions(benchmark::benchmark INTERFACE "BENCHMARK_STATIC_DEFINE") + + add_library(benchmark::benchmark_main STATIC IMPORTED) + set_target_properties(benchmark::benchmark_main + PROPERTIES IMPORTED_LOCATION "${GBENCHMARK_MAIN_STATIC_LIB}") + target_include_directories(benchmark::benchmark_main BEFORE + INTERFACE "${GBENCHMARK_INCLUDE_DIR}") + target_link_libraries(benchmark::benchmark_main INTERFACE benchmark::benchmark) + + add_dependencies(benchmark::benchmark gbenchmark_ep) + add_dependencies(benchmark::benchmark_main gbenchmark_ep) +endmacro() + +if(ARROW_BUILD_BENCHMARKS) + set(BENCHMARK_REQUIRED_VERSION 1.6.1) + resolve_dependency(benchmark + REQUIRED_VERSION + ${BENCHMARK_REQUIRED_VERSION} + IS_RUNTIME_DEPENDENCY + FALSE) +endif() + +macro(build_rapidjson) + message(STATUS "Building RapidJSON from source") + set(RAPIDJSON_PREFIX + "${CMAKE_CURRENT_BINARY_DIR}/rapidjson_ep/src/rapidjson_ep-install") + set(RAPIDJSON_CMAKE_ARGS + ${EP_COMMON_CMAKE_ARGS} + -DRAPIDJSON_BUILD_DOC=OFF + -DRAPIDJSON_BUILD_EXAMPLES=OFF + -DRAPIDJSON_BUILD_TESTS=OFF + "-DCMAKE_INSTALL_PREFIX=${RAPIDJSON_PREFIX}") + + externalproject_add(rapidjson_ep + ${EP_COMMON_OPTIONS} + PREFIX "${CMAKE_BINARY_DIR}" + URL ${RAPIDJSON_SOURCE_URL} + URL_HASH "SHA256=${ARROW_RAPIDJSON_BUILD_SHA256_CHECKSUM}" + CMAKE_ARGS ${RAPIDJSON_CMAKE_ARGS}) + + set(RAPIDJSON_INCLUDE_DIR "${RAPIDJSON_PREFIX}/include") + # The include directory must 
exist before it is referenced by a target. + file(MAKE_DIRECTORY "${RAPIDJSON_INCLUDE_DIR}") + + add_library(RapidJSON INTERFACE IMPORTED) + target_include_directories(RapidJSON INTERFACE "${RAPIDJSON_INCLUDE_DIR}") + add_dependencies(RapidJSON rapidjson_ep) + + set(RAPIDJSON_VENDORED TRUE) +endmacro() + +if(ARROW_WITH_RAPIDJSON) + set(ARROW_RAPIDJSON_REQUIRED_VERSION "1.1.0") + resolve_dependency(RapidJSON + HAVE_ALT + TRUE + REQUIRED_VERSION + ${ARROW_RAPIDJSON_REQUIRED_VERSION} + IS_RUNTIME_DEPENDENCY + FALSE) +endif() + +macro(build_xsimd) + message(STATUS "Building xsimd from source") + set(XSIMD_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/xsimd_ep/src/xsimd_ep-install") + set(XSIMD_CMAKE_ARGS ${EP_COMMON_CMAKE_ARGS} "-DCMAKE_INSTALL_PREFIX=${XSIMD_PREFIX}") + + externalproject_add(xsimd_ep + ${EP_COMMON_OPTIONS} + PREFIX "${CMAKE_BINARY_DIR}" + URL ${XSIMD_SOURCE_URL} + URL_HASH "SHA256=${ARROW_XSIMD_BUILD_SHA256_CHECKSUM}" + CMAKE_ARGS ${XSIMD_CMAKE_ARGS}) + + set(XSIMD_INCLUDE_DIR "${XSIMD_PREFIX}/include") + # The include directory must exist before it is referenced by a target. + file(MAKE_DIRECTORY "${XSIMD_INCLUDE_DIR}") + + add_library(arrow::xsimd INTERFACE IMPORTED) + target_include_directories(arrow::xsimd INTERFACE "${XSIMD_INCLUDE_DIR}") + add_dependencies(arrow::xsimd xsimd_ep) + + set(XSIMD_VENDORED TRUE) +endmacro() + +if((NOT ARROW_SIMD_LEVEL STREQUAL "NONE") OR (NOT ARROW_RUNTIME_SIMD_LEVEL STREQUAL "NONE" + )) + set(ARROW_USE_XSIMD TRUE) +else() + set(ARROW_USE_XSIMD FALSE) +endif() + +if(ARROW_USE_XSIMD) + resolve_dependency(xsimd + FORCE_ANY_NEWER_VERSION + TRUE + IS_RUNTIME_DEPENDENCY + FALSE + REQUIRED_VERSION + "14.0.0") + + if(xsimd_SOURCE STREQUAL "BUNDLED") + set(ARROW_XSIMD arrow::xsimd) + else() + message(STATUS "xsimd found. 
Headers: ${xsimd_INCLUDE_DIRS}") + set(ARROW_XSIMD xsimd) + endif() +endif() + +macro(build_zlib) + message(STATUS "Building ZLIB from source") + + # ensure zlib is built with -fpic + # and make sure that the build finds the version in Emscripten ports + # - n.b. the actual linking happens because -sUSE_ZLIB=1 is + # set in the compiler variables, but cmake expects + # it to exist at configuration time if we aren't building it as + # bundled. We need to do this for all packages + # not just zlib as some depend on zlib, but we don't rebuild + # if it exists already + if(CMAKE_SYSTEM_NAME STREQUAL "Emscripten") + # build zlib using Emscripten ports + if(NOT EXISTS ${EMSCRIPTEN_SYSROOT}/lib/wasm32-emscripten/pic/libz.a) + execute_process(COMMAND embuilder --pic --force build zlib) + endif() + add_library(ZLIB::ZLIB STATIC IMPORTED) + set_property(TARGET ZLIB::ZLIB + PROPERTY IMPORTED_LOCATION + "${EMSCRIPTEN_SYSROOT}/lib/wasm32-emscripten/pic/libz.a") + target_include_directories(ZLIB::ZLIB INTERFACE "${EMSCRIPTEN_SYSROOT}/include") + list(APPEND ARROW_BUNDLED_STATIC_LIBS ZLIB::ZLIB) + else() + set(ZLIB_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/zlib_ep/src/zlib_ep-install") + if(MSVC) + if(${UPPERCASE_BUILD_TYPE} STREQUAL "DEBUG") + set(ZLIB_STATIC_LIB_NAME zlibstaticd.lib) + else() + set(ZLIB_STATIC_LIB_NAME zlibstatic.lib) + endif() + else() + set(ZLIB_STATIC_LIB_NAME libz.a) + endif() + set(ZLIB_STATIC_LIB "${ZLIB_PREFIX}/lib/${ZLIB_STATIC_LIB_NAME}") + set(ZLIB_CMAKE_ARGS ${EP_COMMON_CMAKE_ARGS} "-DCMAKE_INSTALL_PREFIX=${ZLIB_PREFIX}") + + externalproject_add(zlib_ep + ${EP_COMMON_OPTIONS} + URL ${ZLIB_SOURCE_URL} + URL_HASH "SHA256=${ARROW_ZLIB_BUILD_SHA256_CHECKSUM}" + BUILD_BYPRODUCTS "${ZLIB_STATIC_LIB}" + CMAKE_ARGS ${ZLIB_CMAKE_ARGS}) + + file(MAKE_DIRECTORY "${ZLIB_PREFIX}/include") + + add_library(ZLIB::ZLIB STATIC IMPORTED) + set(ZLIB_LIBRARIES ${ZLIB_STATIC_LIB}) + set(ZLIB_INCLUDE_DIRS "${ZLIB_PREFIX}/include") + set_target_properties(ZLIB::ZLIB PROPERTIES 
IMPORTED_LOCATION ${ZLIB_LIBRARIES}) + target_include_directories(ZLIB::ZLIB BEFORE INTERFACE "${ZLIB_INCLUDE_DIRS}") + + add_dependencies(ZLIB::ZLIB zlib_ep) + list(APPEND ARROW_BUNDLED_STATIC_LIBS ZLIB::ZLIB) + endif() + + set(ZLIB_VENDORED TRUE) +endmacro() + +if(ARROW_WITH_ZLIB) + resolve_dependency(ZLIB PC_PACKAGE_NAMES zlib) +endif() + +function(build_lz4) + message(STATUS "Building LZ4 from source using FetchContent") + + # Set LZ4 as vendored + set(LZ4_VENDORED + TRUE + PARENT_SCOPE) + + # Declare the content + fetchcontent_declare(lz4 + URL ${LZ4_SOURCE_URL} + URL_HASH "SHA256=${ARROW_LZ4_BUILD_SHA256_CHECKSUM}" + SOURCE_SUBDIR "build/cmake") + + # Prepare fetch content environment + prepare_fetchcontent() + + # Set LZ4-specific build options as cache variables + set(LZ4_BUILD_CLI + OFF + CACHE BOOL "Don't build LZ4 CLI" FORCE) + set(LZ4_BUILD_LEGACY_LZ4C + OFF + CACHE BOOL "Don't build legacy LZ4 tools" FORCE) + + # Make the dependency available - this will actually perform the download and configure + fetchcontent_makeavailable(lz4) + + # Use LZ4::lz4 as an imported library not an alias of lz4_static so other targets such as orc + # can depend on it as an external library. External libraries are ignored in + # install(TARGETS orc EXPORT orc_targets) and install(EXPORT orc_targets). + add_library(LZ4::lz4 INTERFACE IMPORTED) + target_link_libraries(LZ4::lz4 INTERFACE lz4_static) + + # Add to bundled static libs. + # We must use lz4_static (not imported target) not LZ4::lz4 (imported target). 
+ set(ARROW_BUNDLED_STATIC_LIBS + ${ARROW_BUNDLED_STATIC_LIBS} lz4_static + PARENT_SCOPE) +endfunction() + +if(ARROW_WITH_LZ4) + resolve_dependency(lz4 + HAVE_ALT + TRUE + PC_PACKAGE_NAMES + liblz4) +endif() + +macro(build_zstd) + message(STATUS "Building Zstandard from source") + + set(ZSTD_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/zstd_ep-install") + + set(ZSTD_CMAKE_ARGS + ${EP_COMMON_CMAKE_ARGS} + "-DCMAKE_INSTALL_PREFIX=${ZSTD_PREFIX}" + -DZSTD_BUILD_PROGRAMS=OFF + -DZSTD_BUILD_SHARED=OFF + -DZSTD_BUILD_STATIC=ON + -DZSTD_MULTITHREAD_SUPPORT=OFF) + + if(MSVC) + set(ZSTD_STATIC_LIB "${ZSTD_PREFIX}/lib/zstd_static.lib") + if(ARROW_USE_STATIC_CRT) + list(APPEND ZSTD_CMAKE_ARGS "-DZSTD_USE_STATIC_RUNTIME=ON") + endif() + else() + set(ZSTD_STATIC_LIB "${ZSTD_PREFIX}/lib/libzstd.a") + endif() + + externalproject_add(zstd_ep + ${EP_COMMON_OPTIONS} + CMAKE_ARGS ${ZSTD_CMAKE_ARGS} + SOURCE_SUBDIR "build/cmake" + INSTALL_DIR ${ZSTD_PREFIX} + URL ${ZSTD_SOURCE_URL} + URL_HASH "SHA256=${ARROW_ZSTD_BUILD_SHA256_CHECKSUM}" + BUILD_BYPRODUCTS "${ZSTD_STATIC_LIB}") + + file(MAKE_DIRECTORY "${ZSTD_PREFIX}/include") + + add_library(zstd::libzstd_static STATIC IMPORTED) + set_target_properties(zstd::libzstd_static PROPERTIES IMPORTED_LOCATION + "${ZSTD_STATIC_LIB}") + target_include_directories(zstd::libzstd_static BEFORE + INTERFACE "${ZSTD_PREFIX}/include") + + add_dependencies(zstd::libzstd_static zstd_ep) + + list(APPEND ARROW_BUNDLED_STATIC_LIBS zstd::libzstd_static) + + set(ZSTD_VENDORED TRUE) +endmacro() + +if(ARROW_WITH_ZSTD) + # ARROW-13384: ZSTD_minCLevel was added in v1.4.0, required by ARROW-13091 + resolve_dependency(zstd + HAVE_ALT + TRUE + PC_PACKAGE_NAMES + libzstd + REQUIRED_VERSION + 1.4.0) + + if(ZSTD_VENDORED) + set(ARROW_ZSTD_LIBZSTD zstd::libzstd_static) + else() + if(ARROW_ZSTD_USE_SHARED) + set(ARROW_ZSTD_LIBZSTD zstd::libzstd_shared) + else() + set(ARROW_ZSTD_LIBZSTD zstd::libzstd_static) + endif() + # vcpkg uses zstd::libzstd + if(NOT TARGET 
${ARROW_ZSTD_LIBZSTD} AND TARGET zstd::libzstd) + set(ARROW_ZSTD_LIBZSTD zstd::libzstd) + endif() + if(NOT TARGET ${ARROW_ZSTD_LIBZSTD}) + message(FATAL_ERROR "Zstandard target doesn't exist: ${ARROW_ZSTD_LIBZSTD}") + endif() + message(STATUS "Found Zstandard: ${ARROW_ZSTD_LIBZSTD}") + endif() +endif() + +# ---------------------------------------------------------------------- +# RE2 (required for Gandiva and gRPC) + +function(build_re2) + list(APPEND CMAKE_MESSAGE_INDENT "RE2: ") + message(STATUS "Building RE2 from source using FetchContent") + set(RE2_VENDORED + TRUE + PARENT_SCOPE) + + fetchcontent_declare(re2 + ${FC_DECLARE_COMMON_OPTIONS} + URL ${RE2_SOURCE_URL} + URL_HASH "SHA256=${ARROW_RE2_BUILD_SHA256_CHECKSUM}") + prepare_fetchcontent() + + # Unity build causes some build errors + set(CMAKE_UNITY_BUILD OFF) + + fetchcontent_makeavailable(re2) + + # Suppress -Wnested-anon-types warnings from RE2's use of anonymous types + # in anonymous unions (a compiler extension). + # See: https://github.com/apache/arrow/issues/48973 + if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") + target_compile_options(re2 PRIVATE -Wno-nested-anon-types) + endif() + + if(CMAKE_VERSION VERSION_LESS 3.28) + set_property(DIRECTORY ${re2_SOURCE_DIR} PROPERTY EXCLUDE_FROM_ALL TRUE) + endif() + + set(ARROW_BUNDLED_STATIC_LIBS + ${ARROW_BUNDLED_STATIC_LIBS} re2::re2 + PARENT_SCOPE) + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +if(ARROW_WITH_RE2) + resolve_dependency(re2 + HAVE_ALT + TRUE + PC_PACKAGE_NAMES + re2) +endif() + +macro(build_bzip2) + message(STATUS "Building BZip2 from source") + set(BZIP2_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/bzip2_ep-install") + set(BZIP2_STATIC_LIB + "${BZIP2_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}bz2${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + + set(BZIP2_EXTRA_ARGS "CC=${CMAKE_C_COMPILER}" "CFLAGS=${EP_C_FLAGS}") + + if(CMAKE_OSX_SYSROOT) + list(APPEND BZIP2_EXTRA_ARGS "SDKROOT=${CMAKE_OSX_SYSROOT}") + endif() + + if(CMAKE_AR) + list(APPEND 
BZIP2_EXTRA_ARGS AR=${CMAKE_AR}) + endif() + + if(CMAKE_RANLIB) + list(APPEND BZIP2_EXTRA_ARGS RANLIB=${CMAKE_RANLIB}) + endif() + + externalproject_add(bzip2_ep + ${EP_COMMON_OPTIONS} + CONFIGURE_COMMAND "" + BUILD_IN_SOURCE 1 + BUILD_COMMAND ${MAKE} libbz2.a ${MAKE_BUILD_ARGS} + ${BZIP2_EXTRA_ARGS} + INSTALL_COMMAND ${MAKE} install -j1 PREFIX=${BZIP2_PREFIX} + ${BZIP2_EXTRA_ARGS} + INSTALL_DIR ${BZIP2_PREFIX} + URL ${ARROW_BZIP2_SOURCE_URL} + URL_HASH "SHA256=${ARROW_BZIP2_BUILD_SHA256_CHECKSUM}" + BUILD_BYPRODUCTS "${BZIP2_STATIC_LIB}") + + file(MAKE_DIRECTORY "${BZIP2_PREFIX}/include") + add_library(BZip2::BZip2 STATIC IMPORTED) + set_target_properties(BZip2::BZip2 PROPERTIES IMPORTED_LOCATION "${BZIP2_STATIC_LIB}") + target_include_directories(BZip2::BZip2 BEFORE INTERFACE "${BZIP2_PREFIX}/include") + set(BZIP2_INCLUDE_DIR "${BZIP2_PREFIX}/include") + + add_dependencies(BZip2::BZip2 bzip2_ep) + + list(APPEND ARROW_BUNDLED_STATIC_LIBS BZip2::BZip2) +endmacro() + +if(ARROW_WITH_BZ2) + resolve_dependency(BZip2 PC_PACKAGE_NAMES bzip2) + + if(${BZip2_SOURCE} STREQUAL "SYSTEM" + AND NOT bzip2_PC_FOUND + AND ARROW_BUILD_STATIC) + get_target_property(BZIP2_TYPE BZip2::BZip2 TYPE) + if(BZIP2_TYPE STREQUAL "INTERFACE_LIBRARY") + # Conan + string(APPEND ARROW_PC_LIBS_PRIVATE + " $>>") + else() + string(APPEND ARROW_PC_LIBS_PRIVATE " $") + endif() + endif() +endif() + +macro(build_utf8proc) + message(STATUS "Building utf8proc from source") + set(UTF8PROC_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/utf8proc_ep-install") + if(MSVC) + set(UTF8PROC_STATIC_LIB "${UTF8PROC_PREFIX}/lib/utf8proc_static.lib") + else() + set(UTF8PROC_STATIC_LIB + "${UTF8PROC_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}utf8proc${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + endif() + + set(UTF8PROC_CMAKE_ARGS ${EP_COMMON_CMAKE_ARGS} + "-DCMAKE_INSTALL_PREFIX=${UTF8PROC_PREFIX}") + + # We can remove this once we remove -DCMAKE_POLICY_VERSION_MINIMUM=3.5 + # from EP_COMMON_CMAKE_ARGS. 
+ list(REMOVE_ITEM UTF8PROC_CMAKE_ARGS -DCMAKE_POLICY_VERSION_MINIMUM=3.5) + + externalproject_add(utf8proc_ep + ${EP_COMMON_OPTIONS} + CMAKE_ARGS ${UTF8PROC_CMAKE_ARGS} + INSTALL_DIR ${UTF8PROC_PREFIX} + URL ${ARROW_UTF8PROC_SOURCE_URL} + URL_HASH "SHA256=${ARROW_UTF8PROC_BUILD_SHA256_CHECKSUM}" + BUILD_BYPRODUCTS "${UTF8PROC_STATIC_LIB}") + + file(MAKE_DIRECTORY "${UTF8PROC_PREFIX}/include") + add_library(utf8proc::utf8proc STATIC IMPORTED) + set_target_properties(utf8proc::utf8proc + PROPERTIES IMPORTED_LOCATION "${UTF8PROC_STATIC_LIB}" + INTERFACE_COMPILE_DEFINITIONS "UTF8PROC_STATIC") + target_include_directories(utf8proc::utf8proc BEFORE + INTERFACE "${UTF8PROC_PREFIX}/include") + + add_dependencies(utf8proc::utf8proc utf8proc_ep) + + list(APPEND ARROW_BUNDLED_STATIC_LIBS utf8proc::utf8proc) +endmacro() + +if(ARROW_WITH_UTF8PROC) + set(utf8proc_resolve_dependency_args utf8proc PC_PACKAGE_NAMES libutf8proc) + if(NOT ARROW_VCPKG) + # utf8proc in vcpkg doesn't provide version information: + # https://github.com/microsoft/vcpkg/issues/39176 + list(APPEND utf8proc_resolve_dependency_args REQUIRED_VERSION "2.2.0") + endif() + resolve_dependency(${utf8proc_resolve_dependency_args}) +endif() + +function(build_cares) + list(APPEND CMAKE_MESSAGE_INDENT "c-ares: ") + message(STATUS "Building c-ares from source using FetchContent") + set(CARES_VENDORED + TRUE + PARENT_SCOPE) + + fetchcontent_declare(cares + URL ${CARES_SOURCE_URL} + URL_HASH "SHA256=${ARROW_CARES_BUILD_SHA256_CHECKSUM}") + + prepare_fetchcontent() + + set(CARES_SHARED OFF) + set(CARES_STATIC ON) + set(CARES_BUILD_TOOLS OFF) + fetchcontent_makeavailable(cares) + + if(APPLE) + # libresolv must be linked from c-ares version 1.16.1 + find_library(LIBRESOLV_LIBRARY NAMES resolv libresolv REQUIRED) + set_target_properties(c-ares::cares PROPERTIES INTERFACE_LINK_LIBRARIES + "${LIBRESOLV_LIBRARY}") + endif() + + set(ARROW_BUNDLED_STATIC_LIBS + ${ARROW_BUNDLED_STATIC_LIBS} c-ares::cares + PARENT_SCOPE) + 
list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +# ---------------------------------------------------------------------- +# Dependencies for Arrow Flight RPC + +function(build_absl) + list(APPEND CMAKE_MESSAGE_INDENT "ABSL: ") + message(STATUS "Building Abseil from source using FetchContent") + set(ABSL_VENDORED + TRUE + PARENT_SCOPE) + + if(CMAKE_COMPILER_IS_GNUCC AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 13.0) + string(APPEND CMAKE_CXX_FLAGS " -include stdint.h") + endif() + + fetchcontent_declare(absl + ${FC_DECLARE_COMMON_OPTIONS} OVERRIDE_FIND_PACKAGE + URL ${ABSL_SOURCE_URL} + URL_HASH "SHA256=${ARROW_ABSL_BUILD_SHA256_CHECKSUM}") + + prepare_fetchcontent() + + # We have to enable Abseil install to add Abseil targets to an export set. + # But we don't install Abseil by EXCLUDE_FROM_ALL. + set(ABSL_ENABLE_INSTALL ON) + fetchcontent_makeavailable(absl) + + if(CMAKE_VERSION VERSION_LESS 3.28) + set_property(DIRECTORY ${absl_SOURCE_DIR} PROPERTY EXCLUDE_FROM_ALL TRUE) + endif() + + if(APPLE) + # This is due to upstream absl::cctz issue + # https://github.com/abseil/abseil-cpp/issues/283 + find_library(CoreFoundation CoreFoundation) + # When ABSL_ENABLE_INSTALL is ON, the real target is "time" not "absl_time" + # Cannot use set_property on alias targets (absl::time is an alias) + set_property(TARGET time + APPEND + PROPERTY INTERFACE_LINK_LIBRARIES ${CoreFoundation}) + endif() + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +function(build_grpc) + resolve_dependency(c-ares + ARROW_CMAKE_PACKAGE_NAME + ArrowFlight + ARROW_PC_PACKAGE_NAME + arrow-flight + HAVE_ALT + TRUE + PC_PACKAGE_NAMES + libcares) + + list(APPEND CMAKE_MESSAGE_INDENT "gRPC: ") + message(STATUS "Building gRPC from source using FetchContent") + set(GRPC_VENDORED + TRUE + PARENT_SCOPE) + + fetchcontent_declare(grpc + ${FC_DECLARE_COMMON_OPTIONS} + URL ${GRPC_SOURCE_URL} + URL_HASH "SHA256=${ARROW_GRPC_BUILD_SHA256_CHECKSUM}") + + prepare_fetchcontent() + + 
if(PROTOBUF_VENDORED) + set(_gRPC_PROTOBUF_LIBRARIES "protobuf::libprotobuf") + + set(_gRPC_PROTOBUF_PROTOC_LIBRARIES "protobuf::libprotoc") + + set(_gRPC_PROTOBUF_PROTOC_EXECUTABLE "$") + + # gRPC needs this at configure time. + get_filename_component(_protobuf_root_dir "${protobuf_SOURCE_DIR}" DIRECTORY) + set(_gRPC_PROTOBUF_WELLKNOWN_INCLUDE_DIR "${_protobuf_root_dir}/src") + endif() + + # Use "none" provider for c-ares or re2, either we vendored it or we already found it. + set(gRPC_CARES_PROVIDER + "none" + CACHE STRING "" FORCE) + set(_gRPC_CARES_LIBRARIES "c-ares::cares") + + set(gRPC_RE2_PROVIDER "none") + set(_gRPC_RE2_LIBRARIES "re2::re2") + + set(gRPC_SSL_PROVIDER "none") + set(_gRPC_SSL_LIBRARIES "OpenSSL::SSL;OpenSSL::Crypto") + set(gRPC_ZLIB_PROVIDER "package") + set(gRPC_INSTALL OFF) + set(gRPC_BUILD_TESTS OFF) + + # Add warning suppression flags for gRPC build. + if(NOT MSVC) + string(APPEND CMAKE_C_FLAGS " -Wno-attributes -Wno-format-security") + string(APPEND CMAKE_CXX_FLAGS " -Wno-attributes -Wno-format-security") + endif() + if(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" OR CMAKE_CXX_COMPILER_ID STREQUAL + "Clang") + string(APPEND CMAKE_C_FLAGS " -Wno-unknown-warning-option") + string(APPEND CMAKE_CXX_FLAGS " -Wno-unknown-warning-option") + endif() + + fetchcontent_makeavailable(grpc) + + if(CMAKE_VERSION VERSION_LESS 3.28) + set_property(DIRECTORY ${grpc_SOURCE_DIR} PROPERTY EXCLUDE_FROM_ALL TRUE) + endif() + + # FetchContent builds gRPC libraries without gRPC:: prefix. + # Create gRPC:: alias targets for consistency. 
+ set(GRPC_LIBRARY_TARGETS + address_sorting + gpr + grpc + grpc++ + grpc++_reflection + upb) + + foreach(target ${GRPC_LIBRARY_TARGETS}) + if(TARGET ${target} AND NOT TARGET gRPC::${target}) + add_library(gRPC::${target} ALIAS ${target}) + endif() + endforeach() + + if(TARGET grpc_cpp_plugin AND NOT TARGET gRPC::grpc_cpp_plugin) + add_executable(gRPC::grpc_cpp_plugin ALIAS grpc_cpp_plugin) + endif() + + # gRPC headers use deprecated std::iterator that causes compilation errors. + # This workaround can be removed once we upgrade to a newer gRPC version. + if(NOT MSVC) + foreach(target ${GRPC_LIBRARY_TARGETS}) + if(TARGET ${target}) + target_compile_options(${target} INTERFACE -Wno-error=deprecated-declarations) + endif() + endforeach() + endif() + + # ar -M rejects with the "libgrpc++.a" filename because "+" is a line + # continuation character in these scripts, so we have to create a copy of the + # static lib that we will bundle later. + set(GRPC_STATIC_LIBRARY_GRPCPP_FOR_AR + "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}grpcpp${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + add_custom_command(OUTPUT ${GRPC_STATIC_LIBRARY_GRPCPP_FOR_AR} + COMMAND ${CMAKE_COMMAND} -E copy $ + ${GRPC_STATIC_LIBRARY_GRPCPP_FOR_AR} + DEPENDS gRPC::grpc++) + add_library(gRPC::grpcpp_for_bundling STATIC IMPORTED) + set_target_properties(gRPC::grpcpp_for_bundling + PROPERTIES IMPORTED_LOCATION + "${GRPC_STATIC_LIBRARY_GRPCPP_FOR_AR}") + + set_source_files_properties("${GRPC_STATIC_LIBRARY_GRPCPP_FOR_AR}" PROPERTIES GENERATED + TRUE) + add_custom_target(grpc_copy_grpc++ ALL DEPENDS "${GRPC_STATIC_LIBRARY_GRPCPP_FOR_AR}") + add_dependencies(gRPC::grpcpp_for_bundling grpc_copy_grpc++) + + # Add gRPC libraries to bundled static libs. 
+ list(APPEND + ARROW_BUNDLED_STATIC_LIBS + gRPC::address_sorting + gRPC::gpr + gRPC::grpc + gRPC::grpcpp_for_bundling + gRPC::upb) + set(ARROW_BUNDLED_STATIC_LIBS + "${ARROW_BUNDLED_STATIC_LIBS}" + PARENT_SCOPE) + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +if(ARROW_WITH_GOOGLE_CLOUD_CPP OR ARROW_WITH_GRPC) + set(ARROW_ABSL_REQUIRED_VERSION 20211102) + # Google Cloud C++ SDK and gRPC require Google Abseil + if(ARROW_WITH_GOOGLE_CLOUD_CPP) + set(ARROW_ABSL_CMAKE_PACKAGE_NAME Arrow) + set(ARROW_ABSL_PC_PACKAGE_NAME arrow) + else() + set(ARROW_ABSL_CMAKE_PACKAGE_NAME ArrowFlight) + set(ARROW_ABSL_PC_PACKAGE_NAME arrow-flight) + endif() + resolve_dependency(absl + ARROW_CMAKE_PACKAGE_NAME + ${ARROW_ABSL_CMAKE_PACKAGE_NAME} + ARROW_PC_PACKAGE_NAME + ${ARROW_ABSL_PC_PACKAGE_NAME} + HAVE_ALT + TRUE + FORCE_ANY_NEWER_VERSION + TRUE + REQUIRED_VERSION + ${ARROW_ABSL_REQUIRED_VERSION}) +endif() + +if(ARROW_WITH_GRPC) + if(NOT ARROW_ENABLE_THREADING) + message(FATAL_ERROR "Can't use gRPC with ARROW_ENABLE_THREADING=OFF") + endif() + + set(ARROW_GRPC_REQUIRED_VERSION "1.30.0") + if(absl_SOURCE STREQUAL "BUNDLED" AND NOT gRPC_SOURCE STREQUAL "BUNDLED") + # System gRPC can't be used with bundled Abseil + message(STATUS "Forcing gRPC_SOURCE to BUNDLED because absl_SOURCE is BUNDLED") + set(gRPC_SOURCE "BUNDLED") + endif() + if(NOT Protobuf_SOURCE STREQUAL gRPC_SOURCE) + # ARROW-15495: Protobuf/gRPC must come from the same source + message(STATUS "Forcing gRPC_SOURCE to Protobuf_SOURCE (${Protobuf_SOURCE})") + set(gRPC_SOURCE "${Protobuf_SOURCE}") + endif() + resolve_dependency(gRPC + ARROW_CMAKE_PACKAGE_NAME + ArrowFlight + ARROW_PC_PACKAGE_NAME + arrow-flight + HAVE_ALT + TRUE + PC_PACKAGE_NAMES + grpc++ + REQUIRED_VERSION + ${ARROW_GRPC_REQUIRED_VERSION}) + + if(GRPC_VENDORED) + # Remove "v" from "vX.Y.Z" + string(SUBSTRING ${ARROW_GRPC_BUILD_VERSION} 1 -1 ARROW_GRPC_VERSION) + # Examples need to link to static Arrow if we're using static gRPC + 
set(ARROW_GRPC_USE_SHARED OFF) + else() + if(gRPCAlt_VERSION) + set(ARROW_GRPC_VERSION ${gRPCAlt_VERSION}) + else() + set(ARROW_GRPC_VERSION ${gRPC_VERSION}) + endif() + if(ARROW_USE_ASAN) + # Disable ASAN in system gRPC. + add_library(gRPC::grpc_asan_suppressed INTERFACE IMPORTED) + target_compile_definitions(gRPC::grpc_asan_suppressed + INTERFACE "GRPC_ASAN_SUPPRESSED") + target_link_libraries(gRPC::grpc++ INTERFACE gRPC::grpc_asan_suppressed) + endif() + endif() + + if(ARROW_GRPC_CPP_PLUGIN) + if(NOT TARGET gRPC::grpc_cpp_plugin) + add_executable(gRPC::grpc_cpp_plugin IMPORTED) + endif() + set_target_properties(gRPC::grpc_cpp_plugin PROPERTIES IMPORTED_LOCATION + ${ARROW_GRPC_CPP_PLUGIN}) + endif() +endif() + +# ---------------------------------------------------------------------- +# GCS and dependencies + +function(build_crc32c_once) + list(APPEND CMAKE_MESSAGE_INDENT "CRC32C: ") + message(STATUS "Building CRC32C from source using FetchContent") + set(CRC32C_VENDORED + TRUE + PARENT_SCOPE) + set(CRC32C_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/crc32c_fc-install") + set(CRC32C_PREFIX + "${CRC32C_PREFIX}" + PARENT_SCOPE) + + fetchcontent_declare(crc32c + ${FC_DECLARE_COMMON_OPTIONS} OVERRIDE_FIND_PACKAGE + URL ${CRC32C_SOURCE_URL} + URL_HASH "SHA256=${ARROW_CRC32C_BUILD_SHA256_CHECKSUM}") + + prepare_fetchcontent() + + set(CRC32C_BUILD_TESTS OFF) + set(CRC32C_BUILD_BENCHMARKS OFF) + set(CRC32C_USE_GLOG OFF) + fetchcontent_makeavailable(crc32c) + + if(CMAKE_VERSION VERSION_LESS 3.28) + set_property(DIRECTORY ${crc32c_SOURCE_DIR} PROPERTY EXCLUDE_FROM_ALL TRUE) + endif() + + # Create alias target for consistency (crc32c exports as Crc32c::crc32c when installed) + if(NOT TARGET Crc32c::crc32c) + add_library(Crc32c::crc32c ALIAS crc32c) + endif() + + set(ARROW_BUNDLED_STATIC_LIBS + ${ARROW_BUNDLED_STATIC_LIBS} Crc32c::crc32c + PARENT_SCOPE) + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +function(build_nlohmann_json) + list(APPEND CMAKE_MESSAGE_INDENT 
"nlohmann-json: ") + message(STATUS "Building nlohmann-json from source using FetchContent") + set(NLOHMANN_JSON_VENDORED + TRUE + PARENT_SCOPE) + fetchcontent_declare(nlohmann_json + ${FC_DECLARE_COMMON_OPTIONS} OVERRIDE_FIND_PACKAGE + URL ${NLOHMANN_JSON_SOURCE_URL} + URL_HASH "SHA256=${ARROW_NLOHMANN_JSON_BUILD_SHA256_CHECKSUM}") + + prepare_fetchcontent() + + # google-cloud-cpp requires JSON_MultipleHeaders=ON + set(JSON_BuildTests OFF) + set(JSON_MultipleHeaders ON) + set(JSON_Install ON) + fetchcontent_makeavailable(nlohmann_json) + + if(CMAKE_VERSION VERSION_LESS 3.28) + set_property(DIRECTORY ${nlohmann_json_SOURCE_DIR} PROPERTY EXCLUDE_FROM_ALL TRUE) + endif() + + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +if(ARROW_WITH_NLOHMANN_JSON) + resolve_dependency(nlohmann_json) + get_target_property(nlohmann_json_INCLUDE_DIR nlohmann_json::nlohmann_json + INTERFACE_INCLUDE_DIRECTORIES) + message(STATUS "Found nlohmann_json headers: ${nlohmann_json_INCLUDE_DIR}") +endif() + +function(build_google_cloud_cpp_storage) + list(APPEND CMAKE_MESSAGE_INDENT "google-cloud-cpp: ") + message(STATUS "Building google-cloud-cpp from source using FetchContent") + set(GOOGLE_CLOUD_CPP_VENDORED + TRUE + PARENT_SCOPE) + + # List of dependencies taken from https://github.com/googleapis/google-cloud-cpp/blob/main/doc/packaging.md + build_crc32c_once() + + fetchcontent_declare(google_cloud_cpp + ${FC_DECLARE_COMMON_OPTIONS} + URL ${google_cloud_cpp_storage_SOURCE_URL} + URL_HASH "SHA256=${ARROW_GOOGLE_CLOUD_CPP_BUILD_SHA256_CHECKSUM}") + + prepare_fetchcontent() + + message(STATUS "Only building the google-cloud-cpp::storage component") + set(GOOGLE_CLOUD_CPP_ENABLE storage) + # We need this to build with OpenSSL 3.0. 
+ # See also: https://github.com/googleapis/google-cloud-cpp/issues/8544 + set(GOOGLE_CLOUD_CPP_ENABLE_WERROR OFF) + set(GOOGLE_CLOUD_CPP_WITH_MOCKS OFF) + # Disable installation when embedded via FetchContent + # set(GOOGLE_CLOUD_CPP_ENABLE_INSTALL OFF) + set(BUILD_TESTING OFF) + # Unity build causes some build errors. + set(CMAKE_UNITY_BUILD FALSE) + + fetchcontent_makeavailable(google_cloud_cpp) + + if(CMAKE_VERSION VERSION_LESS 3.28) + set_property(DIRECTORY ${google_cloud_cpp_SOURCE_DIR} PROPERTY EXCLUDE_FROM_ALL TRUE) + endif() + + # Remove unused directories to save build directory storage. + # 141MB -> 79MB + file(REMOVE_RECURSE "${google_cloud_cpp_SOURCE_DIR}/ci") + list(APPEND + ARROW_BUNDLED_STATIC_LIBS + google-cloud-cpp::storage + google-cloud-cpp::rest_internal + google-cloud-cpp::common) + + if(ABSL_VENDORED) + # Figure out what absl libraries (not header-only) are required by the + # google-cloud-cpp libraries above and add them to the bundled_dependencies + # + # pkg-config --libs absl_memory absl_strings absl_str_format absl_time absl_variant absl_base absl_memory absl_optional absl_span absl_time absl_variant + # (and then some regexing) + list(APPEND + ARROW_BUNDLED_STATIC_LIBS + absl::bad_optional_access + absl::bad_variant_access + absl::base + absl::civil_time + absl::cord + absl::cord_internal + absl::cordz_functions + absl::cordz_info + absl::cordz_handle + absl::debugging_internal + absl::demangle_internal + absl::exponential_biased + absl::int128 + absl::log_severity + absl::malloc_internal + absl::raw_logging_internal + absl::spinlock_wait + absl::stacktrace + absl::str_format_internal + absl::strings + absl::strings_internal + absl::symbolize + absl::synchronization + absl::throw_delegate + absl::time + absl::time_zone) + endif() + + set(ARROW_BUNDLED_STATIC_LIBS + "${ARROW_BUNDLED_STATIC_LIBS}" + PARENT_SCOPE) + + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +if(ARROW_WITH_GOOGLE_CLOUD_CPP) + if(NOT ARROW_ENABLE_THREADING) + 
message(FATAL_ERROR "Can't use Google Cloud Platform C++ Client Libraries with ARROW_ENABLE_THREADING=OFF" + ) + endif() + + # curl is required on all platforms. We always use system curl to + # avoid conflict. + find_curl() + resolve_dependency(google_cloud_cpp_storage PC_PACKAGE_NAMES google_cloud_cpp_storage) + get_target_property(google_cloud_cpp_storage_INCLUDE_DIR google-cloud-cpp::storage + INTERFACE_INCLUDE_DIRECTORIES) + message(STATUS "Found google-cloud-cpp::storage headers: ${google_cloud_cpp_storage_INCLUDE_DIR}" + ) +endif() + +# +# HDFS thirdparty setup + +if(DEFINED ENV{HADOOP_HOME}) + set(HADOOP_HOME $ENV{HADOOP_HOME}) + if(NOT EXISTS "${HADOOP_HOME}/include/hdfs.h") + message(STATUS "Did not find hdfs.h in expected location, using vendored one") + set(HADOOP_HOME "${THIRDPARTY_DIR}/hadoop") + endif() +else() + set(HADOOP_HOME "${THIRDPARTY_DIR}/hadoop") +endif() + +set(HDFS_H_PATH "${HADOOP_HOME}/include/hdfs.h") +if(NOT EXISTS ${HDFS_H_PATH}) + message(FATAL_ERROR "Did not find hdfs.h at ${HDFS_H_PATH}") +endif() +message(STATUS "Found hdfs.h at: ${HDFS_H_PATH}") + +add_library(arrow::hadoop INTERFACE IMPORTED) +target_include_directories(arrow::hadoop INTERFACE "${HADOOP_HOME}/include") + +# ---------------------------------------------------------------------- +# Apache ORC + +function(build_orc) + list(APPEND CMAKE_MESSAGE_INDENT "Apache ORC: ") + + message(STATUS "Building Apache ORC from source") + + if(LZ4_VENDORED) + set(ORC_LZ4_TARGET lz4_static) + set(ORC_LZ4_ROOT "${lz4_SOURCE_DIR}") + set(ORC_LZ4_INCLUDE_DIR "${lz4_SOURCE_DIR}/lib") + else() + set(ORC_LZ4_TARGET LZ4::lz4) + get_target_property(ORC_LZ4_INCLUDE_DIR ${ORC_LZ4_TARGET} + INTERFACE_INCLUDE_DIRECTORIES) + get_filename_component(ORC_LZ4_ROOT "${ORC_LZ4_INCLUDE_DIR}" DIRECTORY) + endif() + + if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.29) + fetchcontent_declare(orc + ${FC_DECLARE_COMMON_OPTIONS} + URL ${ORC_SOURCE_URL} + URL_HASH "SHA256=${ARROW_ORC_BUILD_SHA256_CHECKSUM}") + 
prepare_fetchcontent() + + set(CMAKE_UNITY_BUILD FALSE) + + set(ORC_PREFER_STATIC_LZ4 OFF) + set(LZ4_HOME "${ORC_LZ4_ROOT}") + set(LZ4_INCLUDE_DIR "${ORC_LZ4_INCLUDE_DIR}") + set(LZ4_LIBRARY ${ORC_LZ4_TARGET}) + + set(ORC_PREFER_STATIC_PROTOBUF OFF) + get_target_property(PROTOBUF_INCLUDE_DIR ${ARROW_PROTOBUF_LIBPROTOBUF} + INTERFACE_INCLUDE_DIRECTORIES) + get_filename_component(Protobuf_ROOT "${PROTOBUF_INCLUDE_DIR}" DIRECTORY) + set(PROTOBUF_HOME ${Protobuf_ROOT}) + set(PROTOBUF_EXECUTABLE ${ARROW_PROTOBUF_PROTOC}) + set(PROTOBUF_LIBRARY ${ARROW_PROTOBUF_LIBPROTOBUF}) + set(PROTOC_LIBRARY ${ARROW_PROTOBUF_LIBPROTOC}) + + set(ORC_PREFER_STATIC_SNAPPY OFF) + get_target_property(SNAPPY_INCLUDE_DIR ${Snappy_TARGET} INTERFACE_INCLUDE_DIRECTORIES) + get_filename_component(Snappy_ROOT "${SNAPPY_INCLUDE_DIR}" DIRECTORY) + set(SNAPPY_HOME ${Snappy_ROOT}) + set(SNAPPY_LIBRARY ${Snappy_TARGET}) + + set(ORC_PREFER_STATIC_ZLIB OFF) + get_target_property(ZLIB_INCLUDE_DIR ZLIB::ZLIB INTERFACE_INCLUDE_DIRECTORIES) + get_filename_component(ZLIB_ROOT "${ZLIB_INCLUDE_DIR}" DIRECTORY) + set(ZLIB_HOME ${ZLIB_ROOT}) + # From CMake 3.21 onwards the set(CACHE) command does not remove + # any normal variable of the same name from the current scope. We + # have to manually remove the variable via unset to avoid ORC not + # finding the ZLIB_LIBRARY. 
+ unset(ZLIB_LIBRARY) + set(ZLIB_LIBRARY + ZLIB::ZLIB + CACHE STRING "" FORCE) + + set(ORC_PREFER_STATIC_ZSTD OFF) + get_target_property(ZSTD_INCLUDE_DIR ${ARROW_ZSTD_LIBZSTD} + INTERFACE_INCLUDE_DIRECTORIES) + get_filename_component(ZSTD_ROOT "${ZSTD_INCLUDE_DIR}" DIRECTORY) + set(ZSTD_HOME ${ZSTD_ROOT}) + set(ZSTD_LIBRARY ${ARROW_ZSTD_LIBZSTD}) + + set(BUILD_CPP_TESTS OFF) + set(BUILD_JAVA OFF) + set(BUILD_LIBHDFSPP OFF) + set(BUILD_TOOLS OFF) + set(INSTALL_VENDORED_LIBS OFF) + set(STOP_BUILD_ON_WARNING OFF) + + fetchcontent_makeavailable(orc) + + add_library(orc::orc INTERFACE IMPORTED) + target_link_libraries(orc::orc INTERFACE orc) + + # ar -M rejects paths with "c++/" because "+" is a line continuation + # character in MRI scripts, so we have to create a copy of the static lib + # that we will bundle later (same issue as libgrpc++.a). + set(ORC_STATIC_LIBRARY_FOR_AR + "${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_STATIC_LIBRARY_PREFIX}orc_for_bundling${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + add_custom_command(OUTPUT ${ORC_STATIC_LIBRARY_FOR_AR} + COMMAND ${CMAKE_COMMAND} -E copy $ + ${ORC_STATIC_LIBRARY_FOR_AR} + DEPENDS orc) + add_library(orc::orc_for_bundling STATIC IMPORTED) + set_target_properties(orc::orc_for_bundling PROPERTIES IMPORTED_LOCATION + "${ORC_STATIC_LIBRARY_FOR_AR}") + set_source_files_properties("${ORC_STATIC_LIBRARY_FOR_AR}" PROPERTIES GENERATED TRUE) + add_custom_target(orc_copy_lib ALL DEPENDS "${ORC_STATIC_LIBRARY_FOR_AR}") + add_dependencies(orc::orc_for_bundling orc_copy_lib) + + list(APPEND ARROW_BUNDLED_STATIC_LIBS orc::orc_for_bundling) + else() + set(ORC_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/orc_ep-install") + set(ORC_HOME "${ORC_PREFIX}") + set(ORC_INCLUDE_DIR "${ORC_PREFIX}/include") + set(ORC_STATIC_LIB + "${ORC_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}orc${CMAKE_STATIC_LIBRARY_SUFFIX}" + ) + + get_target_property(ORC_PROTOBUF_ROOT ${ARROW_PROTOBUF_LIBPROTOBUF} + INTERFACE_INCLUDE_DIRECTORIES) + get_filename_component(ORC_PROTOBUF_ROOT 
"${ORC_PROTOBUF_ROOT}" DIRECTORY) + + get_target_property(ORC_SNAPPY_INCLUDE_DIR ${Snappy_TARGET} + INTERFACE_INCLUDE_DIRECTORIES) + get_filename_component(ORC_SNAPPY_ROOT "${ORC_SNAPPY_INCLUDE_DIR}" DIRECTORY) + + get_target_property(ORC_ZSTD_ROOT ${ARROW_ZSTD_LIBZSTD} INTERFACE_INCLUDE_DIRECTORIES) + get_filename_component(ORC_ZSTD_ROOT "${ORC_ZSTD_ROOT}" DIRECTORY) + + get_target_property(ORC_ZLIB_ROOT ZLIB::ZLIB INTERFACE_INCLUDE_DIRECTORIES) + get_filename_component(ORC_ZLIB_ROOT "${ORC_ZLIB_ROOT}" DIRECTORY) + + set(ORC_CMAKE_ARGS + ${EP_COMMON_CMAKE_ARGS} + "-DCMAKE_INSTALL_PREFIX=${ORC_PREFIX}" + -DSTOP_BUILD_ON_WARNING=OFF + -DBUILD_LIBHDFSPP=OFF + -DBUILD_JAVA=OFF + -DBUILD_TOOLS=OFF + -DBUILD_CPP_TESTS=OFF + -DINSTALL_VENDORED_LIBS=OFF + "-DPROTOBUF_EXECUTABLE=$" + "-DPROTOBUF_HOME=${ORC_PROTOBUF_ROOT}" + "-DPROTOBUF_INCLUDE_DIR=$" + "-DPROTOBUF_LIBRARY=$" + "-DPROTOC_LIBRARY=$" + "-DSNAPPY_HOME=${ORC_SNAPPY_ROOT}" + "-DSNAPPY_LIBRARY=$" + "-DLZ4_HOME=${ORC_LZ4_ROOT}" + "-DLZ4_LIBRARY=$" + "-DLZ4_STATIC_LIB=$" + "-DLZ4_INCLUDE_DIR=${ORC_LZ4_INCLUDE_DIR}" + "-DSNAPPY_INCLUDE_DIR=${ORC_SNAPPY_INCLUDE_DIR}" + "-DZSTD_HOME=${ORC_ZSTD_ROOT}" + "-DZSTD_INCLUDE_DIR=$" + "-DZSTD_LIBRARY=$" + "-DZLIB_HOME=${ORC_ZLIB_ROOT}" + "-DZLIB_INCLUDE_DIR=$" + "-DZLIB_LIBRARY=$") + + # Work around CMake bug + file(MAKE_DIRECTORY ${ORC_INCLUDE_DIR}) + + externalproject_add(orc_ep + ${EP_COMMON_OPTIONS} + BUILD_BYPRODUCTS ${ORC_STATIC_LIB} + CMAKE_ARGS ${ORC_CMAKE_ARGS} + DEPENDS ${ARROW_PROTOBUF_LIBPROTOBUF} + ${ARROW_PROTOBUF_PROTOC} + ${ARROW_ZSTD_LIBZSTD} + ${Snappy_TARGET} + ${ORC_LZ4_TARGET} + ZLIB::ZLIB + URL ${ORC_SOURCE_URL} + URL_HASH "SHA256=${ARROW_ORC_BUILD_SHA256_CHECKSUM}") + add_library(orc::orc STATIC IMPORTED) + set_target_properties(orc::orc PROPERTIES IMPORTED_LOCATION "${ORC_STATIC_LIB}") + target_include_directories(orc::orc BEFORE INTERFACE "${ORC_INCLUDE_DIR}") + target_link_libraries(orc::orc INTERFACE LZ4::lz4 ZLIB::ZLIB ${ARROW_ZSTD_LIBZSTD} + 
${Snappy_TARGET}) + # Protobuf generated files may use ABSL_DCHECK*() and + # absl::log_internal_check_op is needed for them. + if(TARGET absl::log_internal_check_op) + target_link_libraries(orc::orc INTERFACE absl::log_internal_check_op) + endif() + if(NOT MSVC) + if(NOT APPLE AND ARROW_ENABLE_THREADING) + target_link_libraries(orc::orc INTERFACE Threads::Threads) + endif() + target_link_libraries(orc::orc INTERFACE ${CMAKE_DL_LIBS}) + endif() + target_link_libraries(orc::orc INTERFACE ${ARROW_PROTOBUF_LIBPROTOBUF}) + add_dependencies(orc::orc orc_ep) + list(APPEND ARROW_BUNDLED_STATIC_LIBS orc::orc) + endif() + + set(ORC_VENDORED + TRUE + PARENT_SCOPE) + set(ARROW_BUNDLED_STATIC_LIBS + ${ARROW_BUNDLED_STATIC_LIBS} + PARENT_SCOPE) + + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +if(ARROW_ORC) + resolve_dependency(orc HAVE_ALT TRUE) + if(ORC_VENDORED) + set(ARROW_ORC_VERSION ${ARROW_ORC_BUILD_VERSION}) + else() + target_link_libraries(orc::orc INTERFACE ${ARROW_PROTOBUF_LIBPROTOBUF}) + set(ARROW_ORC_VERSION ${orcAlt_VERSION}) + message(STATUS "Found ORC static library: ${ORC_STATIC_LIB}") + message(STATUS "Found ORC headers: ${ORC_INCLUDE_DIR}") + endif() +endif() + +# ---------------------------------------------------------------------- +# OpenTelemetry C++ + +function(build_opentelemetry) + list(APPEND CMAKE_MESSAGE_INDENT "OpenTelemetry: ") + message(STATUS "Building OpenTelemetry from source using FetchContent") + + if(Protobuf_VERSION VERSION_GREATER_EQUAL 3.22) + message(FATAL_ERROR "GH-36013: Can't use bundled OpenTelemetry with Protobuf 3.22 or later. 
" + "Protobuf is version ${Protobuf_VERSION}") + endif() + + set(OPENTELEMETRY_VENDORED + TRUE + PARENT_SCOPE) + + fetchcontent_declare(opentelemetry_proto + ${FC_DECLARE_COMMON_OPTIONS} + URL ${OPENTELEMETRY_PROTO_SOURCE_URL} + URL_HASH "SHA256=${ARROW_OPENTELEMETRY_PROTO_BUILD_SHA256_CHECKSUM}" + ) + + # Use FetchContent_Populate instead of MakeAvailable because opentelemetry-proto + # has no CMakeLists.txt. + cmake_policy(PUSH) + if(POLICY CMP0169) + cmake_policy(SET CMP0169 OLD) + endif() + fetchcontent_populate(opentelemetry_proto) + cmake_policy(POP) + + fetchcontent_declare(opentelemetry_cpp + ${FC_DECLARE_COMMON_OPTIONS} + URL ${OPENTELEMETRY_SOURCE_URL} + URL_HASH "SHA256=${ARROW_OPENTELEMETRY_BUILD_SHA256_CHECKSUM}") + + prepare_fetchcontent() + + # Unity build causes symbol redefinition errors in protobuf-generated code + set(CMAKE_UNITY_BUILD FALSE) + set(OTELCPP_PROTO_PATH "${opentelemetry_proto_SOURCE_DIR}") + set(WITH_EXAMPLES OFF) + set(WITH_OTLP_HTTP ON) + set(WITH_OTLP_GRPC OFF) + set(WITH_FUNC_TESTS OFF) + # These options are slated for removal in v1.14 and their features are deemed stable + # as of v1.13. However, setting their corresponding ENABLE_* macros in headers seems + # finicky - resulting in build failures or ABI-related runtime errors during HTTP + # client initialization. There may still be a solution, but we disable them for now. + set(WITH_OTLP_HTTP_SSL_PREVIEW OFF) + set(WITH_OTLP_HTTP_SSL_TLS_PREVIEW OFF) + + fetchcontent_makeavailable(opentelemetry_cpp) + + if(CMAKE_VERSION VERSION_LESS 3.28) + set_property(DIRECTORY ${opentelemetry_cpp_SOURCE_DIR} PROPERTY EXCLUDE_FROM_ALL TRUE) + endif() + + # Remove unused directories to save build directory storage + file(REMOVE_RECURSE "${opentelemetry_cpp_SOURCE_DIR}/ci") + + # OpenTelemetry creates its own targets. We need to add them to bundled static libs. + # The targets created by OpenTelemetry's CMakeLists.txt use the opentelemetry:: namespace. 
+ # List of libraries that we actually need and want to bundle. + set(_OPENTELEMETRY_BUNDLED_LIBS + opentelemetry-cpp::common + opentelemetry-cpp::http_client_curl + opentelemetry-cpp::logs + opentelemetry-cpp::ostream_log_record_exporter + opentelemetry-cpp::ostream_span_exporter + opentelemetry-cpp::otlp_http_client + opentelemetry-cpp::otlp_http_log_record_exporter + opentelemetry-cpp::otlp_http_exporter + opentelemetry-cpp::otlp_recordable + opentelemetry-cpp::proto + opentelemetry-cpp::resources + opentelemetry-cpp::trace + opentelemetry-cpp::version) + + list(APPEND ARROW_BUNDLED_STATIC_LIBS ${_OPENTELEMETRY_BUNDLED_LIBS}) + set(ARROW_BUNDLED_STATIC_LIBS + "${ARROW_BUNDLED_STATIC_LIBS}" + PARENT_SCOPE) + + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +if(ARROW_WITH_OPENTELEMETRY) + if(NOT ARROW_ENABLE_THREADING) + message(FATAL_ERROR "Can't use OpenTelemetry with ARROW_ENABLE_THREADING=OFF") + endif() + + # cURL is required whether we build from source or use an existing installation + # (OTel's cmake files do not call find_curl for you) + find_curl() + resolve_dependency(opentelemetry-cpp) + set(ARROW_OPENTELEMETRY_LIBS + opentelemetry-cpp::trace + opentelemetry-cpp::logs + opentelemetry-cpp::otlp_http_log_record_exporter + opentelemetry-cpp::ostream_log_record_exporter + opentelemetry-cpp::ostream_span_exporter + opentelemetry-cpp::otlp_http_exporter) + get_target_property(OPENTELEMETRY_INCLUDE_DIR opentelemetry-cpp::api + INTERFACE_INCLUDE_DIRECTORIES) + message(STATUS "Found OpenTelemetry headers: ${OPENTELEMETRY_INCLUDE_DIR}") +endif() + +# ---------------------------------------------------------------------- +# AWS SDK for C++ + +function(build_awssdk) + list(APPEND CMAKE_MESSAGE_INDENT "AWS SDK for C++: ") + + message(STATUS "Building AWS SDK for C++ from source") + + # aws-c-common must be the first product because others depend on + # this. 
+ set(AWSSDK_PRODUCTS aws-c-common) + if(LINUX) + list(APPEND AWSSDK_PRODUCTS aws-lc s2n-tls) + endif() + list(APPEND + AWSSDK_PRODUCTS + # We can't sort this in alphabetical order because some + # products depend on other products. + aws-checksums + aws-c-cal + aws-c-io + aws-c-event-stream + aws-c-sdkutils + aws-c-compression + aws-c-http + aws-c-mqtt + aws-c-auth + aws-c-s3 + aws-crt-cpp + aws-sdk-cpp) + set(AWS_SDK_CPP_SOURCE_URL "${AWSSDK_SOURCE_URL}") + set(ARROW_AWS_SDK_CPP_BUILD_SHA256_CHECKSUM "${ARROW_AWSSDK_BUILD_SHA256_CHECKSUM}") + foreach(AWSSDK_PRODUCT ${AWSSDK_PRODUCTS}) + # aws-c-cal -> + # AWS-C-CAL + string(TOUPPER "${AWSSDK_PRODUCT}" BASE_VARIABLE_NAME) + # AWS-C-CAL -> + # AWS_C_CAL + string(REGEX REPLACE "-" "_" BASE_VARIABLE_NAME "${BASE_VARIABLE_NAME}") + fetchcontent_declare(${AWSSDK_PRODUCT} + ${FC_DECLARE_COMMON_OPTIONS} OVERRIDE_FIND_PACKAGE + URL ${${BASE_VARIABLE_NAME}_SOURCE_URL} + URL_HASH "SHA256=${ARROW_${BASE_VARIABLE_NAME}_BUILD_SHA256_CHECKSUM}" + ) + endforeach() + + prepare_fetchcontent() + set(BUILD_DEPS OFF) + set(BUILD_TOOL OFF) + set(CMAKE_UNITY_BUILD OFF) # Unity build causes some build errors. + set(ENABLE_TESTING OFF) + set(IN_SOURCE_BUILD ON) + set(MINIMIZE_SIZE ON) + set(USE_OPENSSL ON) + + # For aws-c-common + if(MINGW) + # PPROCESSOR_NUMBER requires Windows 7 or later. 
+ # + # 0x0601 == _WIN32_WINNT_WIN7 + string(APPEND CMAKE_C_FLAGS " -D_WIN32_WINNT=0x0601") + string(APPEND CMAKE_CXX_FLAGS " -D_WIN32_WINNT=0x0601") + endif() + + # For aws-lc + set(DISABLE_GO ON) + set(DISABLE_PERL ON) + + # For s2n-tls + set(crypto_INCLUDE_DIR "$") + set(crypto_STATIC_LIBRARY "$") + set(S2N_INTERN_LIBCRYPTO ON) + + # For aws-lc and s2n-tls + # + # Link time optimization is causing trouble like GH-34349 + string(REPLACE "-flto=auto" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") + string(REPLACE "-ffat-lto-objects" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") + + # For aws-c-io + if(MINGW AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS "9") + # This is for RTools 40. We can remove this after we dropped + # support for R < 4.2. schannel.h in RTools 40 is old. + + # For schannel.h + # + # See also: + # https://learn.microsoft.com/en-us/windows/win32/api/schannel/ns-schannel-schannel_cred + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_0_SERVER=0x00000040") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_0_CLIENT=0x00000080") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_1_SERVER=0x00000100") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_1_CLIENT=0x00000200") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_2_SERVER=0x00000400") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_2_CLIENT=0x00000800") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_3_SERVER=0x00001000") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_3_CLIENT=0x00002000") + string(APPEND CMAKE_C_FLAGS " -DSCH_USE_STRONG_CRYPTO=0x00400000") + + # For sspi.h + # + # See also: + # https://learn.microsoft.com/en-us/windows/win32/api/sspi/ne-sspi-sec_application_protocol_negotiation_ext + string(APPEND CMAKE_C_FLAGS " -DSecApplicationProtocolNegotiationExt_ALPN=2") + # See also: + # https://learn.microsoft.com/en-us/windows/win32/api/sspi/ns-sspi-secbuffer + string(APPEND CMAKE_C_FLAGS " -DSECBUFFER_ALERT=17") + endif() + + # For aws-sdk-cpp + # + # We need to use CACHE variables because aws-sdk-cpp < 1.12.0 uses + # 
CMP0077 OLD policy. We can use normal variables when we use + # aws-sdk-cpp >= 1.12.0. + set(AWS_SDK_WARNINGS_ARE_ERRORS + OFF + CACHE BOOL "" FORCE) + set(BUILD_DEPS + OFF + CACHE BOOL "" FORCE) + set(BUILD_ONLY + "" + CACHE STRING "" FORCE) + list(APPEND + BUILD_ONLY + config + core + identity-management + s3 + sts + transfer) + set(BUILD_SHARED_LIBS + OFF + CACHE BOOL "" FORCE) + set(ENABLE_TESTING + OFF + CACHE BOOL "" FORCE) + if(NOT WIN32) + if(ZLIB_VENDORED) + # Use vendored zlib. + set(ZLIB_INCLUDE_DIR + "$" + CACHE STRING "" FORCE) + set(ZLIB_LIBRARY + "$" + CACHE STRING "" FORCE) + endif() + endif() + if(MINGW AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS "9") + # This is for RTools 40. We can remove this after we dropped + # support for R < 4.2. schannel.h in RTools 40 is old. + + # For winhttp.h + # + # See also: + # https://learn.microsoft.com/en-us/windows/win32/winhttp/error-messages + string(APPEND CMAKE_CXX_FLAGS " -DERROR_WINHTTP_UNHANDLED_SCRIPT_TYPE=12176") + string(APPEND CMAKE_CXX_FLAGS " -DERROR_WINHTTP_SCRIPT_EXECUTION_ERROR=12177") + # See also: + # https://learn.microsoft.com/en-us/windows/win32/api/winhttp/ns-winhttp-winhttp_async_result + string(APPEND CMAKE_CXX_FLAGS " -DAPI_GET_PROXY_FOR_URL=6") + # See also: + # https://learn.microsoft.com/en-us/windows/win32/api/winhttp/nc-winhttp-winhttp_status_callback + string(APPEND CMAKE_CXX_FLAGS " -DWINHTTP_CALLBACK_STATUS_CLOSE_COMPLETE=0x02000000") + string(APPEND CMAKE_CXX_FLAGS + " -DWINHTTP_CALLBACK_STATUS_SHUTDOWN_COMPLETE=0x04000000") + # See also: + # https://learn.microsoft.com/en-us/windows/win32/winhttp/option-flags + string(APPEND CMAKE_CXX_FLAGS " -DWINHTTP_FLAG_SECURE_PROTOCOL_TLS1_2=0x00000800") + string(APPEND CMAKE_CXX_FLAGS " -DWINHTTP_NO_CLIENT_CERT_CONTEXT=0") + endif() + + set(AWSSDK_LINK_LIBRARIES) + foreach(AWSSDK_PRODUCT ${AWSSDK_PRODUCTS}) + if("${AWSSDK_PRODUCT}" STREQUAL "s2n-tls") + # Use aws-lc's openssl/*.h not openssl/*.h in system. 
+ set(ADDITIONAL_FLAGS "-DCOMPILE_DEFINITIONS=-I${aws-lc_SOURCE_DIR}/include") + endif() + fetchcontent_makeavailable(${AWSSDK_PRODUCT}) + if(CMAKE_VERSION VERSION_LESS 3.28) + set_property(DIRECTORY ${${AWSSDK_PRODUCT}_SOURCE_DIR} PROPERTY EXCLUDE_FROM_ALL + TRUE) + endif() + list(PREPEND CMAKE_MODULE_PATH "${${AWSSDK_PRODUCT}_SOURCE_DIR}/cmake") + if(NOT "${AWSSDK_PRODUCT}" STREQUAL "aws-sdk-cpp") + if("${AWSSDK_PRODUCT}" STREQUAL "aws-lc") + # We don't need to link aws-lc. It's used only by s2n-tls. + elseif("${AWSSDK_PRODUCT}" STREQUAL "s2n-tls") + list(PREPEND AWSSDK_LINK_LIBRARIES s2n) + # Disable -Werror for s2n-tls: it has Clang 18 warnings that it intentionally allows. + # See: https://github.com/aws/s2n-tls/issues/5696 + if(TARGET s2n) + target_compile_options(s2n PRIVATE -Wno-error) + endif() + else() + list(PREPEND AWSSDK_LINK_LIBRARIES ${AWSSDK_PRODUCT}) + # This is for find_package(aws-*) in aws-crt-cpp and aws-sdk-cpp. + add_library(AWS::${AWSSDK_PRODUCT} ALIAS ${AWSSDK_PRODUCT}) + endif() + endif() + endforeach() + list(PREPEND + AWSSDK_LINK_LIBRARIES + aws-cpp-sdk-identity-management + aws-cpp-sdk-sts + aws-cpp-sdk-cognito-identity + aws-cpp-sdk-s3 + aws-cpp-sdk-core) + + set(AWSSDK_VENDORED + TRUE + PARENT_SCOPE) + set(ARROW_BUNDLED_STATIC_LIBS + ${ARROW_BUNDLED_STATIC_LIBS} ${AWSSDK_LINK_LIBRARIES} + PARENT_SCOPE) + set(AWSSDK_LINK_LIBRARIES + ${AWSSDK_LINK_LIBRARIES} + PARENT_SCOPE) + + list(POP_BACK CMAKE_MESSAGE_INDENT) +endfunction() + +if(ARROW_S3) + if(NOT WIN32) + # This is for adding system curl dependency. 
+ find_curl() + endif() + # Keep this in sync with s3fs.cc + resolve_dependency(AWSSDK + HAVE_ALT + TRUE + REQUIRED_VERSION + 1.11.0) + + message(STATUS "Found AWS SDK headers: ${AWSSDK_INCLUDE_DIR}") + message(STATUS "Found AWS SDK libraries: ${AWSSDK_LINK_LIBRARIES}") + + if(ARROW_BUILD_STATIC) + if(${AWSSDK_SOURCE} STREQUAL "SYSTEM") + foreach(AWSSDK_LINK_LIBRARY ${AWSSDK_LINK_LIBRARIES}) + string(APPEND ARROW_PC_LIBS_PRIVATE " $") + endforeach() + else() + if(UNIX) + string(APPEND ARROW_PC_REQUIRES_PRIVATE " libcurl") + endif() + string(APPEND ARROW_PC_REQUIRES_PRIVATE " openssl") + if(APPLE) + string(APPEND ARROW_PC_LIBS_PRIVATE " -framework Security") + endif() + endif() + endif() +endif() + +# ---------------------------------------------------------------------- +# Azure SDK for C++ + +function(build_azure_sdk) + message(STATUS "Building Azure SDK for C++ from source") + + # On Windows, Azure SDK's WinHTTP transport requires WIL (Windows Implementation Libraries). + # Fetch WIL before Azure SDK so the WIL::WIL target is available. + if(WIN32) + message(STATUS "Fetching WIL (Windows Implementation Libraries) for Azure SDK") + fetchcontent_declare(wil + ${FC_DECLARE_COMMON_OPTIONS} OVERRIDE_FIND_PACKAGE + URL ${ARROW_WIL_URL} + URL_HASH "SHA256=${ARROW_WIL_BUILD_SHA256_CHECKSUM}") + prepare_fetchcontent() + set(WIL_BUILD_PACKAGING OFF) + set(WIL_BUILD_TESTS OFF) + fetchcontent_makeavailable(wil) + endif() + + fetchcontent_declare(azure_sdk + ${FC_DECLARE_COMMON_OPTIONS} + URL ${ARROW_AZURE_SDK_URL} + URL_HASH "SHA256=${ARROW_AZURE_SDK_BUILD_SHA256_CHECKSUM}") + prepare_fetchcontent() + set(BUILD_PERFORMANCE_TESTS FALSE) + set(BUILD_SAMPLES FALSE) + set(BUILD_TESTING FALSE) + set(BUILD_WINDOWS_UWP TRUE) + # ICU 75.1 or later requires C++17 but Azure SDK for C++ still uses + # C++14. So we disable C++ API in ICU. + # + # We can remove this after + # https://github.com/Azure/azure-sdk-for-cpp/pull/6486 is merged. 
+ string(APPEND CMAKE_CXX_FLAGS " -DU_SHOW_CPLUSPLUS_API=0") + set(CMAKE_UNITY_BUILD FALSE) + set(DISABLE_AZURE_CORE_OPENTELEMETRY TRUE) + set(ENV{AZURE_SDK_DISABLE_AUTO_VCPKG} TRUE) + set(WARNINGS_AS_ERRORS FALSE) + fetchcontent_makeavailable(azure_sdk) + if(CMAKE_VERSION VERSION_LESS 3.28) + set_property(DIRECTORY ${azure_sdk_SOURCE_DIR} PROPERTY EXCLUDE_FROM_ALL TRUE) + endif() + set(AZURE_SDK_VENDORED + TRUE + PARENT_SCOPE) + set(ARROW_BUNDLED_STATIC_LIBS + ${ARROW_BUNDLED_STATIC_LIBS} + Azure::azure-core + Azure::azure-identity + Azure::azure-storage-blobs + Azure::azure-storage-common + Azure::azure-storage-files-datalake + PARENT_SCOPE) +endfunction() + +if(ARROW_WITH_AZURE_SDK) + resolve_dependency(Azure REQUIRED_VERSION 1.10.2) + set(AZURE_SDK_LINK_LIBRARIES Azure::azure-storage-files-datalake + Azure::azure-storage-blobs Azure::azure-identity) +endif() + +# ---------------------------------------------------------------------- +# Apache Flight SQL ODBC + +if(ARROW_FLIGHT_SQL_ODBC) + find_package(ODBC REQUIRED) +endif() + +message(STATUS "All bundled static libraries: ${ARROW_BUNDLED_STATIC_LIBS}") diff --git a/python/cmake_modules/UseCython.cmake b/python/cmake_modules/UseCython.cmake new file mode 100644 index 000000000000..7d88daa4fade --- /dev/null +++ b/python/cmake_modules/UseCython.cmake @@ -0,0 +1,192 @@ +# Define a function to create Cython modules. +# +# For more information on the Cython project, see http://cython.org/. +# "Cython is a language that makes writing C extensions for the Python language +# as easy as Python itself." +# +# This file defines a CMake function to build a Cython Python module. +# To use it, first include this file. +# +# include( UseCython ) +# +# Then call cython_add_module to create a module. +# +# cython_add_module( ... 
) +# +# Where is the desired name of the target for the resulting Python module, +# is the desired name of the target that runs the Cython compiler +# to generate the needed C or C++ files, is a variable to hold the +# files generated by Cython, and ... are source files +# to be compiled into the module, e.g. *.pyx, *.c, *.cxx, etc. +# only one .pyx file may be present for each target +# (this is an inherent limitation of Cython). +# +# The same paths set with the CMake include_directories() command will be used +# for include directories to search for *.pxd when running the Cython compiler. +# +# Cache variables that affect the behavior include: +# +# CYTHON_ANNOTATE +# CYTHON_NO_DOCSTRINGS +# CYTHON_FLAGS +# +# Source file properties that affect the build process are +# +# CYTHON_IS_CXX +# CYTHON_IS_PUBLIC +# CYTHON_IS_API +# +# If this is set for a *.pyx file with CMake set_source_files_properties() +# command, the file will be compiled as a C++ file. +# +# See also FindCython.cmake + +#============================================================================= +# Copyright 2011 Kitware, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +# Configuration options. 
+set(CYTHON_ANNOTATE OFF CACHE BOOL "Create an annotated .html file when compiling *.pyx.") +set(CYTHON_NO_DOCSTRINGS OFF CACHE BOOL "Strip docstrings from the compiled module.") +set(CYTHON_FLAGS "" CACHE STRING "Extra flags to the cython compiler.") +mark_as_advanced(CYTHON_ANNOTATE CYTHON_NO_DOCSTRINGS CYTHON_FLAGS) + +find_package(Python3Alt REQUIRED) + +# (using another C++ extension breaks coverage) +set(CYTHON_CXX_EXTENSION "cpp") +set(CYTHON_C_EXTENSION "c") + +# Create a *.c or *.cpp file from a *.pyx file. +# Input is the generated file basename. The generated files will be put into the +# variable named by the "generated_files" argument. Finally, pass all the *.py +# and *.pyx files. +function(compile_pyx + _name + pyx_target_name + generated_files + pyx_file) + # Default to assuming all files are C. + set(cxx_arg "") + set(extension ${CYTHON_C_EXTENSION}) + set(pyx_lang "C") + set(comment "Compiling Cython C source for ${_name}...") + + get_filename_component(pyx_file_basename "${pyx_file}" NAME_WE) + + # Determine if it is a C or C++ file. + get_source_file_property(property_is_cxx ${pyx_file} CYTHON_IS_CXX) + if(${property_is_cxx}) + set(cxx_arg "--cplus") + set(extension ${CYTHON_CXX_EXTENSION}) + set(pyx_lang "CXX") + set(comment "Compiling Cython CXX source for ${_name}...") + endif() + get_source_file_property(pyx_location ${pyx_file} LOCATION) + + set(output_file "${_name}.${extension}") + + # Set additional flags. + if(CYTHON_ANNOTATE) + set(annotate_arg "--annotate") + endif() + + if(CYTHON_NO_DOCSTRINGS) + set(no_docstrings_arg "--no-docstrings") + endif() + + if(NOT WIN32) + string( TOLOWER "${CMAKE_BUILD_TYPE}" build_type ) + if("${build_type}" STREQUAL "debug" + OR "${build_type}" STREQUAL "relwithdebinfo") + set(cython_debug_arg "--gdb") + endif() + endif() + + # Determine generated file names. 
+ get_source_file_property(property_is_public ${pyx_file} CYTHON_PUBLIC) + get_source_file_property(property_is_api ${pyx_file} CYTHON_API) + if(${property_is_api}) + set(_generated_files "${output_file}" "${_name}.h" "${_name}_api.h") + elseif(${property_is_public}) + set(_generated_files "${output_file}" "${_name}.h") + else() + set(_generated_files "${output_file}") + endif() + set_source_files_properties(${_generated_files} PROPERTIES GENERATED TRUE) + + if(NOT WIN32) + # Cython creates a lot of compiler warning detritus on clang + set_source_files_properties(${_generated_files} PROPERTIES COMPILE_FLAGS + -Wno-unused-function) + endif() + + set(${generated_files} ${_generated_files} PARENT_SCOPE) + + # Add the command to run the compiler. + add_custom_target( + ${pyx_target_name} + COMMAND ${PYTHON_EXECUTABLE} + -m + cython + ${cxx_arg} + ${annotate_arg} + ${no_docstrings_arg} + ${cython_debug_arg} + ${CYTHON_FLAGS} + # Necessary for autodoc of function arguments + --directive embedsignature=True + # Necessary for Cython code coverage + --working + ${CMAKE_CURRENT_SOURCE_DIR} + --output-file + "${CMAKE_CURRENT_BINARY_DIR}/${output_file}" + "${CMAKE_CURRENT_SOURCE_DIR}/${pyx_file}" + DEPENDS ${pyx_location} + # Do not specify byproducts for now since they don't work with the older + # version of cmake available in the apt repositories. + #BYPRODUCTS ${_generated_files} + COMMENT ${comment}) + + # Remove their visibility to the user. + set(corresponding_pxd_file "" CACHE INTERNAL "") + set(header_location "" CACHE INTERNAL "") + set(pxd_location "" CACHE INTERNAL "") +endfunction() + +# cython_add_module( src1 src2 ... srcN ) +# Build the Cython Python module. 
+function(cython_add_module _name pyx_target_name generated_files) + set(pyx_module_source "") + set(other_module_sources "") + foreach(_file ${ARGN}) + if(${_file} MATCHES ".*\\.py[x]?$") + list(APPEND pyx_module_source ${_file}) + else() + list(APPEND other_module_sources ${_file}) + endif() + endforeach() + compile_pyx(${_name} ${pyx_target_name} _generated_files ${pyx_module_source}) + set(${generated_files} ${_generated_files} PARENT_SCOPE) + include_directories(${PYTHON_INCLUDE_DIRS}) + python_add_module(${_name} ${_generated_files} ${other_module_sources}) + add_dependencies(${_name} ${pyx_target_name}) +endfunction() + +execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "from Cython.Compiler.Version import version; print(version)" + OUTPUT_VARIABLE CYTHON_VERSION_OUTPUT + OUTPUT_STRIP_TRAILING_WHITESPACE) +set(CYTHON_VERSION "${CYTHON_VERSION_OUTPUT}") + +include(CMakeParseArguments) diff --git a/python/cmake_modules/Usevcpkg.cmake b/python/cmake_modules/Usevcpkg.cmake new file mode 100644 index 000000000000..016cfd169e42 --- /dev/null +++ b/python/cmake_modules/Usevcpkg.cmake @@ -0,0 +1,160 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +message(STATUS "Using vcpkg to find dependencies") + +# ---------------------------------------------------------------------- +# Define macros + +# macro to list subdirectories (non-recursive) +macro(list_subdirs SUBDIRS DIR) + file(GLOB children_ + RELATIVE ${DIR} + ${DIR}/*) + set(subdirs_ "") + foreach(child_ ${children_}) + if(IS_DIRECTORY "${DIR}/${child_}") + list(APPEND subdirs_ ${child_}) + endif() + endforeach() + set("${SUBDIRS}" ${subdirs_}) + unset(children_) + unset(subdirs_) +endmacro() + +# ---------------------------------------------------------------------- +# Get VCPKG_ROOT + +if(DEFINED CMAKE_TOOLCHAIN_FILE) + # Get it from the CMake variable CMAKE_TOOLCHAIN_FILE + get_filename_component(_VCPKG_DOT_CMAKE "${CMAKE_TOOLCHAIN_FILE}" NAME) + if(EXISTS "${CMAKE_TOOLCHAIN_FILE}" AND _VCPKG_DOT_CMAKE STREQUAL "vcpkg.cmake") + get_filename_component(_VCPKG_BUILDSYSTEMS_DIR "${CMAKE_TOOLCHAIN_FILE}" DIRECTORY) + get_filename_component(VCPKG_ROOT "${_VCPKG_BUILDSYSTEMS_DIR}/../.." 
ABSOLUTE) + else() + message(FATAL_ERROR "vcpkg toolchain file not found at path specified in -DCMAKE_TOOLCHAIN_FILE" + ) + endif() +else() + if(DEFINED VCPKG_ROOT) + # Get it from the CMake variable VCPKG_ROOT + find_program(_VCPKG_BIN vcpkg + PATHS "${VCPKG_ROOT}" + NO_DEFAULT_PATH) + if(NOT _VCPKG_BIN) + message(FATAL_ERROR "vcpkg not found in directory specified in -DVCPKG_ROOT") + endif() + elseif(DEFINED ENV{VCPKG_ROOT}) + # Get it from the environment variable VCPKG_ROOT + set(VCPKG_ROOT $ENV{VCPKG_ROOT}) + find_program(_VCPKG_BIN vcpkg + PATHS "${VCPKG_ROOT}" + NO_DEFAULT_PATH) + if(NOT _VCPKG_BIN) + message(FATAL_ERROR "vcpkg not found in directory in environment variable VCPKG_ROOT" + ) + endif() + else() + # Get it from the file vcpkg.path.txt + find_program(_VCPKG_BIN vcpkg) + if(_VCPKG_BIN) + get_filename_component(_VCPKG_REAL_BIN "${_VCPKG_BIN}" REALPATH) + get_filename_component(VCPKG_ROOT "${_VCPKG_REAL_BIN}" DIRECTORY) + else() + if(CMAKE_HOST_WIN32) + set(_VCPKG_PATH_TXT "$ENV{LOCALAPPDATA}/vcpkg/vcpkg.path.txt") + else() + set(_VCPKG_PATH_TXT "$ENV{HOME}/.vcpkg/vcpkg.path.txt") + endif() + if(EXISTS "${_VCPKG_PATH_TXT}") + file(READ "${_VCPKG_PATH_TXT}" VCPKG_ROOT) + else() + message(FATAL_ERROR "vcpkg not found. Install vcpkg if not installed, " + "then run vcpkg integrate install or set environment variable VCPKG_ROOT." + ) + endif() + find_program(_VCPKG_BIN vcpkg + PATHS "${VCPKG_ROOT}" + NO_DEFAULT_PATH) + if(NOT _VCPKG_BIN) + message(FATAL_ERROR "vcpkg not found. 
Re-run vcpkg integrate install " + "or set environment variable VCPKG_ROOT.") + endif() + endif() + endif() + set(CMAKE_TOOLCHAIN_FILE + "${VCPKG_ROOT}/scripts/buildsystems/vcpkg.cmake" + CACHE FILEPATH "Path to vcpkg CMake toolchain file") +endif() +message(STATUS "Using CMAKE_TOOLCHAIN_FILE: ${CMAKE_TOOLCHAIN_FILE}") +message(STATUS "Using VCPKG_ROOT: ${VCPKG_ROOT}") + +# ---------------------------------------------------------------------- +# Get VCPKG_TARGET_TRIPLET + +if(DEFINED ENV{VCPKG_DEFAULT_TRIPLET} AND NOT DEFINED VCPKG_TARGET_TRIPLET) + set(VCPKG_TARGET_TRIPLET "$ENV{VCPKG_DEFAULT_TRIPLET}") +endif() +# Explicitly set manifest mode on if it is not set and vcpkg.json exists +if(NOT DEFINED VCPKG_MANIFEST_MODE AND EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/vcpkg.json") + set(VCPKG_MANIFEST_MODE + ON + CACHE BOOL "Use vcpkg.json manifest") + message(STATUS "vcpkg.json manifest found. Using VCPKG_MANIFEST_MODE: ON") +endif() +if(NOT DEFINED VCPKG_TARGET_TRIPLET) + message(FATAL_ERROR "Could not infer VCPKG_TARGET_TRIPLET. 
" + "Specify triplet with -DVCPKG_TARGET_TRIPLET.") +endif() + +set(VCPKG_TARGET_TRIPLET + "${VCPKG_TARGET_TRIPLET}" + CACHE STRING "vcpkg triplet for the target environment") + +if(NOT DEFINED VCPKG_BUILD_TYPE) + set(VCPKG_BUILD_TYPE + "${LOWERCASE_BUILD_TYPE}" + CACHE STRING "vcpkg build type (release|debug)") +endif() + +if(NOT DEFINED VCPKG_LIBRARY_LINKAGE) + if(ARROW_DEPENDENCY_USE_SHARED) + set(VCPKG_LIBRARY_LINKAGE "dynamic") + else() + set(VCPKG_LIBRARY_LINKAGE "static") + endif() + set(VCPKG_LIBRARY_LINKAGE + "${VCPKG_LIBRARY_LINKAGE}" + CACHE STRING "vcpkg preferred library linkage (static|dynamic)") +endif() + +message(STATUS "Using vcpkg installed libraries directory: ${_VCPKG_INSTALLED_DIR}") +message(STATUS "Using VCPKG_TARGET_TRIPLET: ${VCPKG_TARGET_TRIPLET}") +message(STATUS "Using VCPKG_BUILD_TYPE: ${VCPKG_BUILD_TYPE}") +message(STATUS "Using VCPKG_LIBRARY_LINKAGE: ${VCPKG_LIBRARY_LINKAGE}") + +set(ARROW_VCPKG_PREFIX + "${_VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}" + CACHE PATH "Path to target triplet subdirectory in vcpkg installed directory") + +set(ARROW_VCPKG + ON + CACHE BOOL "Use vcpkg for dependencies") + +set(ARROW_DEPENDENCY_SOURCE + "SYSTEM" + CACHE STRING "The specified value VCPKG is implemented internally as SYSTEM" FORCE) diff --git a/python/cmake_modules/orc-2345.patch b/python/cmake_modules/orc-2345.patch new file mode 100644 index 000000000000..ee5e38d6e6ac --- /dev/null +++ b/python/cmake_modules/orc-2345.patch @@ -0,0 +1,43 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +From a76249e13a6e364e0507a12cb71abaaf1647252e Mon Sep 17 00:00:00 2001 +From: Yuriy Chernyshov +Date: Thu, 31 Jul 2025 13:20:15 +0200 +Subject: [PATCH] Fix Windows build + +See +https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/byteswap-uint64-byteswap-ulong-byteswap-ushort?view=msvc-170 +--- + c++/src/Geospatial.cc | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/c++/src/Geospatial.cc b/c++/src/Geospatial.cc +index 6d7d268703..2b110cacb6 100644 +--- a/c++/src/Geospatial.cc ++++ b/c++/src/Geospatial.cc +@@ -66,8 +66,8 @@ namespace orc::geospatial { + + #if defined(_MSC_VER) + #include // IWYU pragma: keep +-#define ORC_BYTE_SWAP64 _byteSwap_uint64 +-#define ORC_BYTE_SWAP32 _byteSwap_ulong ++#define ORC_BYTE_SWAP64 _byteswap_uint64 ++#define ORC_BYTE_SWAP32 _byteswap_ulong + #else + #define ORC_BYTE_SWAP64 __builtin_bswap64 + #define ORC_BYTE_SWAP32 __builtin_bswap32 diff --git a/python/cmake_modules/orc-2357.patch b/python/cmake_modules/orc-2357.patch new file mode 100644 index 000000000000..41096e10429a --- /dev/null +++ b/python/cmake_modules/orc-2357.patch @@ -0,0 +1,86 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +From a66baec5731b65a81189f48c242433d01580f344 Mon Sep 17 00:00:00 2001 +From: Dongjoon Hyun +Date: Fri, 15 Aug 2025 12:31:09 -0700 +Subject: [PATCH] ORC-1973: [C++] Use `int64_t` instead of + `google::protobuf::int64` + +--- + c++/src/io/InputStream.cc | 4 ++-- + c++/src/io/InputStream.hh | 2 +- + c++/src/io/OutputStream.cc | 4 ++-- + c++/src/io/OutputStream.hh | 2 +- + 4 files changed, 6 insertions(+), 6 deletions(-) + +diff --git a/c++/src/io/InputStream.cc b/c++/src/io/InputStream.cc +index 06ef40bd4c..5e1dc00ccd 100644 +--- a/c++/src/io/InputStream.cc ++++ b/c++/src/io/InputStream.cc +@@ -112,8 +112,8 @@ namespace orc { + return false; + } + +- google::protobuf::int64 SeekableArrayInputStream::ByteCount() const { +- return static_cast(position_); ++ int64_t SeekableArrayInputStream::ByteCount() const { ++ return static_cast(position_); + } + + void SeekableArrayInputStream::seek(PositionProvider& seekPosition) { +diff --git a/c++/src/io/InputStream.hh b/c++/src/io/InputStream.hh +index 07aa623b5f..8b251c9301 100644 +--- a/c++/src/io/InputStream.hh ++++ b/c++/src/io/InputStream.hh +@@ -72,7 +72,7 @@ namespace orc { + virtual bool Next(const void** data, int* size) override; + virtual void BackUp(int count) override; + virtual bool Skip(int count) override; +- virtual google::protobuf::int64 ByteCount() const override; ++ virtual int64_t ByteCount() const override; + virtual void seek(PositionProvider& position) override; + virtual std::string getName() const override; + }; +diff --git a/c++/src/io/OutputStream.cc b/c++/src/io/OutputStream.cc +index 
fbf1ca61dd..a55050d122 100644 +--- a/c++/src/io/OutputStream.cc ++++ b/c++/src/io/OutputStream.cc +@@ -65,8 +65,8 @@ namespace orc { + // PASS + } + +- google::protobuf::int64 BufferedOutputStream::ByteCount() const { +- return static_cast(dataBuffer_->size()); ++ int64_t BufferedOutputStream::ByteCount() const { ++ return static_cast(dataBuffer_->size()); + } + + bool BufferedOutputStream::WriteAliasedRaw(const void*, int) { +diff --git a/c++/src/io/OutputStream.hh b/c++/src/io/OutputStream.hh +index 6319de96d6..b029818125 100644 +--- a/c++/src/io/OutputStream.hh ++++ b/c++/src/io/OutputStream.hh +@@ -61,7 +61,7 @@ namespace orc { + + virtual bool Next(void** data, int* size) override; + virtual void BackUp(int count) override; +- virtual google::protobuf::int64 ByteCount() const override; ++ virtual int64_t ByteCount() const override; + virtual bool WriteAliasedRaw(const void* data, int size) override; + virtual bool AllowsAliasing() const override; + diff --git a/python/cmake_modules/san-config.cmake b/python/cmake_modules/san-config.cmake new file mode 100644 index 000000000000..8c2983e18b40 --- /dev/null +++ b/python/cmake_modules/san-config.cmake @@ -0,0 +1,123 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. See accompanying LICENSE file. + +# Clang does not support using ASAN and TSAN simultaneously. 
+if("${ARROW_USE_ASAN}" AND "${ARROW_USE_TSAN}") + message(SEND_ERROR "Can only enable one of ASAN or TSAN at a time") +endif() + +# Flag to enable clang address sanitizer +# This will only build if clang or a recent enough gcc is the chosen compiler +if(${ARROW_USE_ASAN}) + if(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" + OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang" + OR (CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION + VERSION_GREATER "4.8")) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address -DADDRESS_SANITIZER") + else() + message(SEND_ERROR "Cannot use ASAN without clang or gcc >= 4.8") + endif() +endif() + +# Flag to enable clang undefined behavior sanitizer +# We explicitly don't enable all of the sanitizer flags: +# - disable 'vptr' because of RTTI issues across shared libraries (?) +# - disable 'alignment' because unaligned access is really OK on Nehalem and we do it +# all over the place. +# - disable 'function' because it appears to give a false positive +# (https://github.com/google/sanitizers/issues/911) +# - disable 'float-divide-by-zero' on clang, which considers it UB +# (https://bugs.llvm.org/show_bug.cgi?id=17000#c1) +# Note: GCC does not support the 'function' flag. 
+if(${ARROW_USE_UBSAN}) + if(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" OR CMAKE_CXX_COMPILER_ID STREQUAL + "Clang") + set(CMAKE_CXX_FLAGS + "${CMAKE_CXX_FLAGS} -fsanitize=undefined -fno-sanitize=alignment,vptr,function,float-divide-by-zero -fno-sanitize-recover=all" + ) + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION + VERSION_GREATER_EQUAL "5.1") + set(CMAKE_CXX_FLAGS + "${CMAKE_CXX_FLAGS} -fsanitize=undefined -fno-sanitize=alignment,vptr -fno-sanitize-recover=all" + ) + else() + message(SEND_ERROR "Cannot use UBSAN without clang or gcc >= 5.1") + endif() +endif() + +# Flag to enable thread sanitizer (clang or gcc 4.8) +if(${ARROW_USE_TSAN}) + if(NOT + (CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" + OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang" + OR (CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION + VERSION_GREATER "4.8"))) + message(SEND_ERROR "Cannot use TSAN without clang or gcc >= 4.8") + endif() + + add_definitions("-fsanitize=thread") + + # Enables dynamic_annotations.h to actually generate code + add_definitions("-DDYNAMIC_ANNOTATIONS_ENABLED") + + # changes atomicops to use the tsan implementations + add_definitions("-DTHREAD_SANITIZER") + + # Disables using the precompiled template specializations for std::string, shared_ptr, etc + # so that the annotations in the header actually take effect. + add_definitions("-D_GLIBCXX_EXTERN_TEMPLATE=0") + + # Some of the above also need to be passed to the linker. + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -pie -fsanitize=thread") + set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -pie -fsanitize=thread") + + # Strictly speaking, TSAN doesn't require dynamic linking. But it does + # require all code to be position independent, and the easiest way to + # guarantee that is via dynamic linking (not all 3rd party archives are + # compiled with -fPIC e.g. boost). 
+ if("${ARROW_LINK}" STREQUAL "a") + message(STATUS "Using dynamic linking for TSAN") + set(ARROW_LINK "d") + elseif("${ARROW_LINK}" STREQUAL "s") + message(SEND_ERROR "Cannot use TSAN with static linking") + endif() +endif() + +if(${ARROW_USE_COVERAGE}) + if(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" OR CMAKE_CXX_COMPILER_ID STREQUAL + "Clang") + add_definitions("-fsanitize-coverage=pc-table,inline-8bit-counters,edge,no-prune,trace-cmp,trace-div,trace-gep" + ) + + set(CMAKE_CXX_FLAGS + "${CMAKE_CXX_FLAGS} -fsanitize-coverage=pc-table,inline-8bit-counters,edge,no-prune,trace-cmp,trace-div,trace-gep" + ) + else() + message(SEND_ERROR "You can only enable coverage with clang") + endif() +endif() + +if("${ARROW_USE_UBSAN}" + OR "${ARROW_USE_ASAN}" + OR "${ARROW_USE_TSAN}") + # GCC 4.8 and 4.9 (latest as of this writing) don't allow you to specify + # disallowed entries for the sanitizer. + if(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" OR CMAKE_CXX_COMPILER_ID STREQUAL + "Clang") + set(CMAKE_CXX_FLAGS + "${CMAKE_CXX_FLAGS} -fsanitize-blacklist=${BUILD_SUPPORT_DIR}/sanitizer-disallowed-entries.txt" + ) + else() + message(WARNING "GCC does not support specifying a sanitizer disallowed entries list. Known sanitizer check failures will not be suppressed." + ) + endif() +endif() diff --git a/python/cmake_modules/thrift-3187.patch b/python/cmake_modules/thrift-3187.patch new file mode 100644 index 000000000000..44a916148810 --- /dev/null +++ b/python/cmake_modules/thrift-3187.patch @@ -0,0 +1,162 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +From ad893633097b05ecdba8aa0f27aaf173dc7839b2 Mon Sep 17 00:00:00 2001 +From: Sutou Kouhei +Date: Fri, 8 Aug 2025 16:19:10 +0900 +Subject: [PATCH] THRIFT-3268: Suppress gnu-zero-variadic-macro-arguments + warnings + +Client: cpp + +We can reproduce these warnings by: + + CC=clang CXX=clang++ \ + cmake \ + -S . \ + -B ../thrift.build \ + -DWITH_{AS3,JAVA,JAVASCRIPT,NODEJS,PYTHON,C_GLIB}=OFF \ + -DCMAKE_CXX_FLAGS="-Wgnu-zero-variadic-macro-arguments" + cmake --build ../thrift.build + +Sample warning: + + lib/cpp/src/thrift/TLogging.h:119:13: warning: token pasting of ',' and __VA_ARGS__ is a GNU extension [-Wgnu-zero-variadic-macro-arguments] + 119 | ##__VA_ARGS__); \ + | ^ +--- + lib/cpp/src/thrift/TLogging.h | 12 ++++++------ + lib/cpp/test/TransportTest.cpp | 12 ++++++------ + lib/cpp/test/ZlibTest.cpp | 6 ++---- + 3 files changed, 14 insertions(+), 16 deletions(-) + +diff --git a/lib/cpp/src/thrift/TLogging.h b/lib/cpp/src/thrift/TLogging.h +index 07ff030f7da..64e9bf80bbb 100644 +--- a/lib/cpp/src/thrift/TLogging.h ++++ b/lib/cpp/src/thrift/TLogging.h +@@ -55,7 +55,7 @@ + #if T_GLOBAL_DEBUGGING_LEVEL > 0 + #define T_DEBUG(format_string, ...) \ + if (T_GLOBAL_DEBUGGING_LEVEL > 0) { \ +- fprintf(stderr, "[%s,%d] " format_string " \n", __FILE__, __LINE__, ##__VA_ARGS__); \ ++ fprintf(stderr, "[%s,%d] " format_string " \n", __FILE__, __LINE__, __VA_ARGS__); \ + } + #else + #define T_DEBUG(format_string, ...) 
+@@ -80,7 +80,7 @@ + __FILE__, \ + __LINE__, \ + dbgtime, \ +- ##__VA_ARGS__); \ ++ __VA_ARGS__); \ + } \ + } + #else +@@ -96,7 +96,7 @@ + */ + #define T_DEBUG_L(level, format_string, ...) \ + if ((level) > 0) { \ +- fprintf(stderr, "[%s,%d] " format_string " \n", __FILE__, __LINE__, ##__VA_ARGS__); \ ++ fprintf(stderr, "[%s,%d] " format_string " \n", __FILE__, __LINE__, __VA_ARGS__); \ + } + + /** +@@ -116,7 +116,7 @@ + __FILE__, \ + __LINE__, \ + dbgtime, \ +- ##__VA_ARGS__); \ ++ __VA_ARGS__); \ + } + + /** +@@ -137,7 +137,7 @@ + __FILE__, \ + __LINE__, \ + dbgtime, \ +- ##__VA_ARGS__); \ ++ __VA_ARGS__); \ + exit(1); \ + } + +@@ -155,7 +155,7 @@ + time(&now); \ + THRIFT_CTIME_R(&now, dbgtime); \ + dbgtime[24] = '\0'; \ +- fprintf(stderr, "[%s] " format_string " \n", dbgtime, ##__VA_ARGS__); \ ++ fprintf(stderr, "[%s] " format_string " \n", dbgtime, __VA_ARGS__); \ + } \ + } + #else +diff --git a/lib/cpp/test/TransportTest.cpp b/lib/cpp/test/TransportTest.cpp +index d6d38595a6b..8a05465773a 100644 +--- a/lib/cpp/test/TransportTest.cpp ++++ b/lib/cpp/test/TransportTest.cpp +@@ -784,23 +784,23 @@ void test_borrow_none_available() { + **************************************************************************/ + + #define ADD_TEST_RW(CoupledTransports, totalSize, ...) \ +- addTestRW(BOOST_STRINGIZE(CoupledTransports), totalSize, ##__VA_ARGS__); ++ addTestRW(BOOST_STRINGIZE(CoupledTransports), totalSize, __VA_ARGS__); + + #define TEST_RW(CoupledTransports, totalSize, ...) 
\ + do { \ + /* Add the test as specified, to test the non-virtual function calls */ \ +- ADD_TEST_RW(CoupledTransports, totalSize, ##__VA_ARGS__); \ ++ ADD_TEST_RW(CoupledTransports, totalSize, __VA_ARGS__); \ + /* \ + * Also test using the transport as a TTransport*, to test \ + * the read_virt()/write_virt() calls \ + */ \ +- ADD_TEST_RW(CoupledTTransports, totalSize, ##__VA_ARGS__); \ ++ ADD_TEST_RW(CoupledTTransports, totalSize, __VA_ARGS__); \ + /* Test wrapping the transport with TBufferedTransport */ \ +- ADD_TEST_RW(CoupledBufferedTransportsT, totalSize, ##__VA_ARGS__); \ ++ ADD_TEST_RW(CoupledBufferedTransportsT, totalSize, __VA_ARGS__); \ + /* Test wrapping the transport with TFramedTransports */ \ +- ADD_TEST_RW(CoupledFramedTransportsT, totalSize, ##__VA_ARGS__); \ ++ ADD_TEST_RW(CoupledFramedTransportsT, totalSize, __VA_ARGS__); \ + /* Test wrapping the transport with TZlibTransport */ \ +- ADD_TEST_RW(CoupledZlibTransportsT, totalSize, ##__VA_ARGS__); \ ++ ADD_TEST_RW(CoupledZlibTransportsT, totalSize, __VA_ARGS__); \ + } while (0) + + #define ADD_TEST_BLOCKING(CoupledTransports) \ +diff --git a/lib/cpp/test/ZlibTest.cpp b/lib/cpp/test/ZlibTest.cpp +index 274a243913c..ea9c617f625 100644 +--- a/lib/cpp/test/ZlibTest.cpp ++++ b/lib/cpp/test/ZlibTest.cpp +@@ -347,8 +347,7 @@ void test_get_underlying_transport() { + do { \ + ::std::ostringstream name_ss; \ + name_ss << name << "-" << BOOST_STRINGIZE(_FUNC); \ +- ::std::function test_func = \ +- ::std::bind(_FUNC, ##__VA_ARGS__); \ ++ ::std::function test_func = ::std::bind(_FUNC, __VA_ARGS__); \ + ::boost::unit_test::test_case* tc \ + = ::boost::unit_test::make_test_case(test_func, name_ss.str(), __FILE__, __LINE__); \ + (suite)->add(tc); \ +@@ -359,8 +358,7 @@ void test_get_underlying_transport() { + ::std::ostringstream name_ss; \ + name_ss << name << "-" << BOOST_STRINGIZE(_FUNC); \ + ::boost::unit_test::test_case* tc \ +- = ::boost::unit_test::make_test_case(::std::bind(_FUNC, \ +- ##__VA_ARGS__), \ 
++ = ::boost::unit_test::make_test_case(::std::bind(_FUNC, __VA_ARGS__), \ + name_ss.str()); \ + (suite)->add(tc); \ + } while (0)